use crate::{crypto_hash::sha256::sha256, fft::EvaluationDomain, polycommit::kzg10, Prepare};
use hashbrown::{HashMap, HashSet};
use snarkvm_curves::{PairingCurve, PairingEngine, ProjectiveCurve};
use snarkvm_fields::{ConstraintFieldError, Field, PrimeField, ToConstraintField};
use snarkvm_utilities::{error, serialize::*, FromBytes, ToBytes};
use std::{
borrow::{Borrow, Cow},
collections::{BTreeMap, BTreeSet},
fmt,
ops::{AddAssign, MulAssign, SubAssign},
};
use super::{LabeledPolynomial, PolynomialInfo};
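/// `UniversalParams` are the universal parameters (SRS) for the underlying KZG10 scheme.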
pub type UniversalParams<E> = kzg10::UniversalParams<E>;
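/// `Randomness` is the commitment (hiding) randomness for the underlying KZG10 scheme.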
pub type Randomness<E> = kzg10::KZGRandomness<E>;
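/// `Commitment` is a KZG10 commitment to a polynomial.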
pub type Commitment<E> = kzg10::KZGCommitment<E>;
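/// `PreparedCommitment` is a `Commitment` with its successive doublings precomputed.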
pub type PreparedCommitment<E> = kzg10::PreparedKZGCommitment<E>;
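/// Preparing a commitment precomputes `2^i * C` for `i = 0..128`, so that scalar
/// multiplications by the commitment can later be performed with point additions only.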
impl<E: PairingEngine> Prepare for Commitment<E> {
type Prepared = PreparedCommitment<E>;
fn prepare(&self) -> PreparedCommitment<E> {
let mut prepared_comm = Vec::<E::G1Affine>::new();
let mut cur = E::G1Projective::from(self.0);
for _ in 0..128 {
prepared_comm.push(cur.into());
cur.double_in_place();
}
kzg10::PreparedKZGCommitment::<E>(prepared_comm)
}
}
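/// `CommitterKey` is used to commit to, and create evaluation proofs for, a given polynomial.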
#[derive(Clone, Debug, Default, Hash, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq)]
pub struct CommitterKey<E: PairingEngine> {
/// The key used to commit to polynomials.
pub powers_of_beta_g: Vec<E::G1Affine>,
/// The key used to commit to polynomials in Lagrange basis, indexed by domain size.
pub lagrange_bases_at_beta_g: BTreeMap<usize, Vec<E::G1Affine>>,
/// The key used to commit to hiding polynomials.
pub powers_of_beta_times_gamma_g: Vec<E::G1Affine>,
/// The powers used to commit to shifted polynomials.
/// This is `None` if `self` does not support enforcing any degree bounds.
pub shifted_powers_of_beta_g: Option<Vec<E::G1Affine>>,
/// The powers used to commit to shifted hiding polynomials, indexed by degree bound.
/// This is `None` if `self` does not support enforcing any degree bounds.
pub shifted_powers_of_beta_times_gamma_g: Option<BTreeMap<usize, Vec<E::G1Affine>>>,
/// The degree bounds supported by `self`, sorted in ascending order.
/// This is `None` if `self` does not support enforcing any degree bounds.
pub enforced_degree_bounds: Option<Vec<usize>>,
/// The maximum degree supported by the `UniversalParams` that `self` was derived from.
pub max_degree: usize,
}
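// Deserialization mirrors `write_le` below: a `u32` length prefix for each vector of group
// elements, a `bool` tag for each optional field, the maximum degree, and finally a SHA-256
// digest of the group elements that is checked against a recomputed hash.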
impl<E: PairingEngine> FromBytes for CommitterKey<E> {
fn read_le<R: Read>(mut reader: R) -> io::Result<Self> {
let powers_len: u32 = FromBytes::read_le(&mut reader)?;
let mut powers_of_beta_g = Vec::with_capacity(powers_len as usize);
for _ in 0..powers_len {
let power: E::G1Affine = FromBytes::read_le(&mut reader)?;
powers_of_beta_g.push(power);
}
let lagrange_bases_at_beta_len: u32 = FromBytes::read_le(&mut reader)?;
let mut lagrange_bases_at_beta_g = BTreeMap::new();
for _ in 0..lagrange_bases_at_beta_len {
let size: u32 = FromBytes::read_le(&mut reader)?;
let mut basis = Vec::with_capacity(size as usize);
for _ in 0..size {
let power: E::G1Affine = FromBytes::read_le(&mut reader)?;
basis.push(power);
}
lagrange_bases_at_beta_g.insert(size as usize, basis);
}
let powers_of_beta_times_gamma_g_len: u32 = FromBytes::read_le(&mut reader)?;
let mut powers_of_beta_times_gamma_g = Vec::with_capacity(powers_of_beta_times_gamma_g_len as usize);
for _ in 0..powers_of_beta_times_gamma_g_len {
let power: E::G1Affine = FromBytes::read_le(&mut reader)?;
powers_of_beta_times_gamma_g.push(power);
}
let has_shifted_powers_of_beta_g: bool = FromBytes::read_le(&mut reader)?;
let shifted_powers_of_beta_g = match has_shifted_powers_of_beta_g {
true => {
let shifted_powers_len: u32 = FromBytes::read_le(&mut reader)?;
let mut shifted_powers_of_beta_g = Vec::with_capacity(shifted_powers_len as usize);
for _ in 0..shifted_powers_len {
let shifted_power: E::G1Affine = FromBytes::read_le(&mut reader)?;
shifted_powers_of_beta_g.push(shifted_power);
}
Some(shifted_powers_of_beta_g)
}
false => None,
};
let has_shifted_powers_of_beta_times_gamma_g: bool = FromBytes::read_le(&mut reader)?;
let shifted_powers_of_beta_times_gamma_g = match has_shifted_powers_of_beta_times_gamma_g {
true => {
let mut shifted_powers_of_beta_times_gamma_g = BTreeMap::new();
let shifted_powers_of_beta_times_gamma_g_num_elements: u32 = FromBytes::read_le(&mut reader)?;
for _ in 0..shifted_powers_of_beta_times_gamma_g_num_elements {
let key: u32 = FromBytes::read_le(&mut reader)?;
let value_len: u32 = FromBytes::read_le(&mut reader)?;
let mut value = Vec::with_capacity(value_len as usize);
for _ in 0..value_len {
let val: E::G1Affine = FromBytes::read_le(&mut reader)?;
value.push(val);
}
shifted_powers_of_beta_times_gamma_g.insert(key as usize, value);
}
Some(shifted_powers_of_beta_times_gamma_g)
}
false => None,
};
let has_enforced_degree_bounds: bool = FromBytes::read_le(&mut reader)?;
let enforced_degree_bounds = match has_enforced_degree_bounds {
true => {
let enforced_degree_bounds_len: u32 = FromBytes::read_le(&mut reader)?;
let mut enforced_degree_bounds = Vec::with_capacity(enforced_degree_bounds_len as usize);
for _ in 0..enforced_degree_bounds_len {
let enforced_degree_bound: u32 = FromBytes::read_le(&mut reader)?;
enforced_degree_bounds.push(enforced_degree_bound as usize);
}
Some(enforced_degree_bounds)
}
false => None,
};
let max_degree: u32 = FromBytes::read_le(&mut reader)?;
let mut hash_input = powers_of_beta_g.to_bytes_le().map_err(|_| error("Could not serialize powers"))?;
hash_input.extend_from_slice(
&powers_of_beta_times_gamma_g
.to_bytes_le()
.map_err(|_| error("Could not serialize powers_of_beta_times_gamma_g"))?,
);
if let Some(shifted_powers_of_beta_g) = &shifted_powers_of_beta_g {
hash_input.extend_from_slice(
&shifted_powers_of_beta_g
.to_bytes_le()
.map_err(|_| error("Could not serialize shifted_powers_of_beta_g"))?,
);
}
if let Some(shifted_powers_of_beta_times_gamma_g) = &shifted_powers_of_beta_times_gamma_g {
for value in shifted_powers_of_beta_times_gamma_g.values() {
hash_input.extend_from_slice(
&value.to_bytes_le().map_err(|_| error("Could not serialize shifted_power_of_gamma_g"))?,
);
}
}
let hash = sha256(&hash_input);
let expected_hash: [u8; 32] = FromBytes::read_le(&mut reader)?;
if expected_hash != hash {
return Err(error("Mismatching group elements"));
}
Ok(Self {
powers_of_beta_g,
lagrange_bases_at_beta_g,
powers_of_beta_times_gamma_g,
shifted_powers_of_beta_g,
shifted_powers_of_beta_times_gamma_g,
enforced_degree_bounds,
max_degree: max_degree as usize,
})
}
}
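// Serialization writes `u32` length prefixes, `bool` tags for the optional fields, the maximum
// degree, and appends a SHA-256 digest of the group elements as an integrity check.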
impl<E: PairingEngine> ToBytes for CommitterKey<E> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
(self.powers_of_beta_g.len() as u32).write_le(&mut writer)?;
for power in &self.powers_of_beta_g {
power.write_le(&mut writer)?;
}
(self.lagrange_bases_at_beta_g.len() as u32).write_le(&mut writer)?;
for (size, powers) in &self.lagrange_bases_at_beta_g {
(*size as u32).write_le(&mut writer)?;
for power in powers {
power.write_le(&mut writer)?;
}
}
(self.powers_of_beta_times_gamma_g.len() as u32).write_le(&mut writer)?;
for power_of_gamma_g in &self.powers_of_beta_times_gamma_g {
power_of_gamma_g.write_le(&mut writer)?;
}
self.shifted_powers_of_beta_g.is_some().write_le(&mut writer)?;
if let Some(shifted_powers_of_beta_g) = &self.shifted_powers_of_beta_g {
(shifted_powers_of_beta_g.len() as u32).write_le(&mut writer)?;
for shifted_power in shifted_powers_of_beta_g {
shifted_power.write_le(&mut writer)?;
}
}
self.shifted_powers_of_beta_times_gamma_g.is_some().write_le(&mut writer)?;
if let Some(shifted_powers_of_beta_times_gamma_g) = &self.shifted_powers_of_beta_times_gamma_g {
(shifted_powers_of_beta_times_gamma_g.len() as u32).write_le(&mut writer)?;
for (key, shifted_powers_of_beta_g) in shifted_powers_of_beta_times_gamma_g {
(*key as u32).write_le(&mut writer)?;
(shifted_powers_of_beta_g.len() as u32).write_le(&mut writer)?;
for shifted_power in shifted_powers_of_beta_g {
shifted_power.write_le(&mut writer)?;
}
}
}
self.enforced_degree_bounds.is_some().write_le(&mut writer)?;
if let Some(enforced_degree_bounds) = &self.enforced_degree_bounds {
(enforced_degree_bounds.len() as u32).write_le(&mut writer)?;
for enforced_degree_bound in enforced_degree_bounds {
(*enforced_degree_bound as u32).write_le(&mut writer)?;
}
}
(self.max_degree as u32).write_le(&mut writer)?;
let mut hash_input = self.powers_of_beta_g.to_bytes_le().map_err(|_| error("Could not serialize powers"))?;
hash_input.extend_from_slice(
&self
.powers_of_beta_times_gamma_g
.to_bytes_le()
.map_err(|_| error("Could not serialize powers_of_beta_times_gamma_g"))?,
);
if let Some(shifted_powers_of_beta_g) = &self.shifted_powers_of_beta_g {
hash_input.extend_from_slice(
&shifted_powers_of_beta_g
.to_bytes_le()
.map_err(|_| error("Could not serialize shifted_powers_of_beta_g"))?,
);
}
if let Some(shifted_powers_of_beta_times_gamma_g) = &self.shifted_powers_of_beta_times_gamma_g {
for value in shifted_powers_of_beta_times_gamma_g.values() {
hash_input.extend_from_slice(
&value.to_bytes_le().map_err(|_| error("Could not serialize shifted_power_of_gamma_g"))?,
);
}
}
let hash = sha256(&hash_input);
hash.write_le(&mut writer)
}
}
impl<E: PairingEngine> CommitterKey<E> {
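/// Returns the number of shifted powers of beta (0 if there are none); used by
/// `CommitterUnionKey::union` to select the largest committer key.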
fn len(&self) -> usize {
self.shifted_powers_of_beta_g.as_ref().map_or(0, |shifted_powers| shifted_powers.len())
}
}
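/// `CommitterUnionKey` borrows the components of one or more `CommitterKey`s, so that a single
/// key covering the union of their Lagrange bases, shifted powers, and enforced degree bounds
/// can be used for committing.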
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct CommitterUnionKey<'a, E: PairingEngine> {
pub powers_of_beta_g: Option<&'a Vec<E::G1Affine>>,
pub lagrange_bases_at_beta_g: BTreeMap<usize, &'a Vec<E::G1Affine>>,
pub powers_of_beta_times_gamma_g: Option<&'a Vec<E::G1Affine>>,
pub shifted_powers_of_beta_g: Option<&'a Vec<E::G1Affine>>,
pub shifted_powers_of_beta_times_gamma_g: Option<BTreeMap<usize, &'a Vec<E::G1Affine>>>,
pub enforced_degree_bounds: Option<Vec<usize>>,
pub max_degree: usize,
}
impl<'a, E: PairingEngine> CommitterUnionKey<'a, E> {
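/// Obtain the powers for the underlying KZG10 construction.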
pub fn powers(&self) -> kzg10::Powers<E> {
kzg10::Powers {
powers_of_beta_g: self.powers_of_beta_g.unwrap().as_slice().into(),
powers_of_beta_times_gamma_g: self.powers_of_beta_times_gamma_g.unwrap().as_slice().into(),
}
}
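/// Obtain the powers for committing to shifted polynomials with the given degree bound.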
pub fn shifted_powers_of_beta_g(&self, degree_bound: impl Into<Option<usize>>) -> Option<kzg10::Powers<E>> {
match (&self.shifted_powers_of_beta_g, &self.shifted_powers_of_beta_times_gamma_g) {
(Some(shifted_powers_of_beta_g), Some(shifted_powers_of_beta_times_gamma_g)) => {
let max_bound = self.enforced_degree_bounds.as_ref().unwrap().last().unwrap();
let (bound, powers_range) = if let Some(degree_bound) = degree_bound.into() {
assert!(self.enforced_degree_bounds.as_ref().unwrap().contains(&degree_bound));
(degree_bound, (max_bound - degree_bound)..)
} else {
(*max_bound, 0..)
};
let ck = kzg10::Powers {
powers_of_beta_g: shifted_powers_of_beta_g[powers_range].into(),
powers_of_beta_times_gamma_g: shifted_powers_of_beta_times_gamma_g[&bound].clone().into(),
};
Some(ck)
}
(_, _) => None,
}
}
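/// Obtain the Lagrange basis powers for the given evaluation domain, if available.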
pub fn lagrange_basis(&self, domain: EvaluationDomain<E::Fr>) -> Option<kzg10::LagrangeBasis<E>> {
self.lagrange_bases_at_beta_g.get(&domain.size()).map(|basis| kzg10::LagrangeBasis {
lagrange_basis_at_beta_g: Cow::Borrowed(basis),
powers_of_beta_times_gamma_g: Cow::Borrowed(self.powers_of_beta_times_gamma_g.unwrap()),
domain,
})
}
pub fn max_degree(&self) -> usize {
self.max_degree
}
pub fn supported_degree(&self) -> usize {
self.powers_of_beta_g.unwrap().len() - 1
}
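/// Construct a `CommitterUnionKey` from several `CommitterKey`s, borrowing the powers of the
/// largest key and merging all Lagrange bases, shifted powers, and enforced degree bounds.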
pub fn union<T: IntoIterator<Item = &'a CommitterKey<E>>>(committer_keys: T) -> Self {
let mut ck_union = CommitterUnionKey::<E> {
powers_of_beta_g: None,
lagrange_bases_at_beta_g: BTreeMap::new(),
powers_of_beta_times_gamma_g: None,
shifted_powers_of_beta_g: None,
shifted_powers_of_beta_times_gamma_g: None,
enforced_degree_bounds: None,
max_degree: 0,
};
let mut enforced_degree_bounds = vec![];
let mut biggest_ck: Option<&CommitterKey<E>> = None;
let mut shifted_powers_of_beta_times_gamma_g = BTreeMap::new();
for ck in committer_keys {
if biggest_ck.is_none() || biggest_ck.unwrap().len() < ck.len() {
biggest_ck = Some(ck);
}
let lagrange_bases = &ck.lagrange_bases_at_beta_g;
for (bound_base, bases) in lagrange_bases.iter() {
ck_union.lagrange_bases_at_beta_g.entry(*bound_base).or_insert(bases);
}
if let Some(shifted_powers) = ck.shifted_powers_of_beta_times_gamma_g.as_ref() {
for (bound_power, powers) in shifted_powers.iter() {
shifted_powers_of_beta_times_gamma_g.entry(*bound_power).or_insert(powers);
}
}
if let Some(degree_bounds) = &ck.enforced_degree_bounds {
enforced_degree_bounds.append(&mut degree_bounds.clone());
}
}
let biggest_ck = biggest_ck.unwrap();
ck_union.powers_of_beta_g = Some(&biggest_ck.powers_of_beta_g);
ck_union.powers_of_beta_times_gamma_g = Some(&biggest_ck.powers_of_beta_times_gamma_g);
ck_union.shifted_powers_of_beta_g = biggest_ck.shifted_powers_of_beta_g.as_ref();
ck_union.max_degree = biggest_ck.max_degree;
if !enforced_degree_bounds.is_empty() {
enforced_degree_bounds.sort();
enforced_degree_bounds.dedup();
ck_union.enforced_degree_bounds = Some(enforced_degree_bounds);
ck_union.shifted_powers_of_beta_times_gamma_g = Some(shifted_powers_of_beta_times_gamma_g);
}
ck_union
}
}
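/// `VerifierKey` is used to check evaluation proofs for a given commitment.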
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct VerifierKey<E: PairingEngine> {
/// The verification key for the underlying KZG10 scheme.
pub vk: kzg10::VerifierKey<E>,
/// Pairs `(degree_bound, neg_power_of_h)` used to enforce degree bounds.
/// This is `None` if no degree bounds are supported.
pub degree_bounds_and_neg_powers_of_h: Option<Vec<(usize, E::G2Affine)>>,
/// The same pairs as above, with the `G2` elements in prepared form.
pub degree_bounds_and_prepared_neg_powers_of_h: Option<Vec<(usize, <E::G2Affine as PairingCurve>::Prepared)>>,
/// The degree supported by the trimmed parameters that `self` is part of.
pub supported_degree: usize,
/// The maximum degree supported by the `UniversalParams` that `self` was derived from.
pub max_degree: usize,
}
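// Only `degree_bounds_and_neg_powers_of_h` is serialized; the prepared negative powers are
// recomputed during deserialization.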
impl<E: PairingEngine> CanonicalSerialize for VerifierKey<E> {
fn serialize_with_mode<W: Write>(&self, mut writer: W, compress: Compress) -> Result<(), SerializationError> {
self.vk.serialize_with_mode(&mut writer, compress)?;
self.degree_bounds_and_neg_powers_of_h.serialize_with_mode(&mut writer, compress)?;
self.supported_degree.serialize_with_mode(&mut writer, compress)?;
self.max_degree.serialize_with_mode(&mut writer, compress)?;
Ok(())
}
fn serialized_size(&self, compress: Compress) -> usize {
self.vk.serialized_size(compress)
+ self.degree_bounds_and_neg_powers_of_h.serialized_size(compress)
+ self.supported_degree.serialized_size(compress)
+ self.max_degree.serialized_size(compress)
}
}
impl<E: PairingEngine> CanonicalDeserialize for VerifierKey<E> {
fn deserialize_with_mode<R: Read>(
mut reader: R,
compress: Compress,
validate: Validate,
) -> Result<Self, SerializationError> {
let vk = CanonicalDeserialize::deserialize_with_mode(&mut reader, compress, validate)?;
let degree_bounds_and_neg_powers_of_h: Option<Vec<(usize, E::G2Affine)>> =
CanonicalDeserialize::deserialize_with_mode(&mut reader, compress, validate)?;
let supported_degree = CanonicalDeserialize::deserialize_with_mode(&mut reader, compress, validate)?;
let max_degree = CanonicalDeserialize::deserialize_with_mode(&mut reader, compress, validate)?;
let degree_bounds_and_prepared_neg_powers_of_h =
degree_bounds_and_neg_powers_of_h.as_ref().map(|v| v.iter().map(|(b, pow)| (*b, pow.prepare())).collect());
Ok(VerifierKey {
vk,
degree_bounds_and_neg_powers_of_h,
degree_bounds_and_prepared_neg_powers_of_h,
supported_degree,
max_degree,
})
}
}
impl<E: PairingEngine> Valid for VerifierKey<E> {
fn check(&self) -> Result<(), SerializationError> {
Valid::check(&self.vk)?;
Valid::check(&self.degree_bounds_and_neg_powers_of_h)?;
Valid::check(&self.supported_degree)?;
Valid::check(&self.max_degree)?;
Ok(())
}
fn batch_check<'a>(batch: impl Iterator<Item = &'a Self> + Send) -> Result<(), SerializationError>
where
Self: 'a,
{
let batch: Vec<_> = batch.collect();
Valid::batch_check(batch.iter().map(|v| &v.vk))?;
Valid::batch_check(batch.iter().map(|v| &v.degree_bounds_and_neg_powers_of_h))?;
Valid::batch_check(batch.iter().map(|v| &v.supported_degree))?;
Valid::batch_check(batch.iter().map(|v| &v.max_degree))?;
Ok(())
}
}
impl<E: PairingEngine> FromBytes for VerifierKey<E> {
fn read_le<R: Read>(mut reader: R) -> io::Result<Self> {
CanonicalDeserialize::deserialize_compressed(&mut reader)
.map_err(|_| error("could not deserialize VerifierKey"))
}
}
impl<E: PairingEngine> ToBytes for VerifierKey<E> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
CanonicalSerialize::serialize_compressed(self, &mut writer)
.map_err(|_| error("could not serialize VerifierKey"))
}
}
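/// `VerifierUnionKey` borrows the components of one or more `VerifierKey`s, so that a single
/// key covering the union of their degree bounds can be used for verification.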
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct VerifierUnionKey<'a, E: PairingEngine> {
pub vk: &'a kzg10::VerifierKey<E>,
pub degree_bounds_and_neg_powers_of_h: Option<Vec<(usize, &'a E::G2Affine)>>,
pub degree_bounds_and_prepared_neg_powers_of_h: Option<Vec<(usize, &'a <E::G2Affine as PairingCurve>::Prepared)>>,
pub supported_degree: usize,
pub max_degree: usize,
}
impl<'a, E: PairingEngine> VerifierUnionKey<'a, E> {
pub fn get_shift_power(&self, degree_bound: usize) -> Option<&E::G2Affine> {
self.degree_bounds_and_neg_powers_of_h
.as_ref()
.and_then(|v| v.binary_search_by(|(d, _)| d.cmp(&degree_bound)).ok().map(|i| v[i].1))
}
pub fn get_prepared_shift_power(&self, degree_bound: usize) -> Option<<E::G2Affine as PairingCurve>::Prepared> {
self.degree_bounds_and_prepared_neg_powers_of_h
.as_ref()
.and_then(|v| v.binary_search_by(|(d, _)| d.cmp(&degree_bound)).ok().map(|i| v[i].1.clone()))
}
pub fn max_degree(&self) -> usize {
self.max_degree
}
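/// Construct a `VerifierUnionKey` from several `VerifierKey`s, borrowing the largest key's
/// `vk` and merging their degree bounds and negative powers of `h`.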
pub fn union<T: IntoIterator<Item = &'a VerifierKey<E>>>(verifier_keys: T) -> Self {
let mut bounds_seen = HashSet::<usize>::new();
let mut bounds_and_neg_powers = vec![];
let mut bounds_and_prepared_neg_powers = vec![];
let mut biggest_vk: Option<&VerifierKey<E>> = None;
for vk in verifier_keys {
if biggest_vk.is_none() || biggest_vk.unwrap().supported_degree < vk.supported_degree {
biggest_vk = Some(vk);
}
let new_bounds = vk.degree_bounds_and_neg_powers_of_h.as_ref().unwrap();
let new_prep_bounds = vk.degree_bounds_and_prepared_neg_powers_of_h.as_ref().unwrap();
assert_eq!(new_bounds.len(), new_prep_bounds.len());
for ((bound, neg_powers), (bound2, prep_neg_powers)) in new_bounds.iter().zip(new_prep_bounds) {
assert_eq!(bound, bound2);
if bounds_seen.insert(*bound) {
bounds_and_neg_powers.push((*bound, neg_powers));
bounds_and_prepared_neg_powers.push((*bound, prep_neg_powers));
}
}
}
let biggest_vk = biggest_vk.unwrap();
let mut vk_union = VerifierUnionKey::<E> {
vk: &biggest_vk.vk,
degree_bounds_and_neg_powers_of_h: None,
degree_bounds_and_prepared_neg_powers_of_h: None,
supported_degree: biggest_vk.supported_degree,
max_degree: biggest_vk.max_degree,
};
if !bounds_and_neg_powers.is_empty() {
bounds_and_neg_powers.sort_by(|a, b| a.0.cmp(&b.0));
bounds_and_neg_powers.dedup_by(|a, b| a.0 <= b.0);
vk_union.degree_bounds_and_neg_powers_of_h = Some(bounds_and_neg_powers);
}
if !bounds_and_prepared_neg_powers.is_empty() {
bounds_and_prepared_neg_powers.sort_by(|a, b| a.0.cmp(&b.0));
bounds_and_prepared_neg_powers.dedup_by(|a, b| a.0 <= b.0);
vk_union.degree_bounds_and_prepared_neg_powers_of_h = Some(bounds_and_prepared_neg_powers);
}
vk_union
}
}
impl<E: PairingEngine> ToConstraintField<E::Fq> for VerifierKey<E> {
fn to_field_elements(&self) -> Result<Vec<E::Fq>, ConstraintFieldError> {
let mut res = Vec::new();
res.extend_from_slice(&self.vk.to_field_elements()?);
if let Some(degree_bounds_and_neg_powers_of_h) = &self.degree_bounds_and_neg_powers_of_h {
for (d, neg_powers_of_h) in degree_bounds_and_neg_powers_of_h.iter() {
let d_elem: E::Fq = (*d as u64).into();
res.push(d_elem);
res.append(&mut neg_powers_of_h.to_field_elements()?);
}
}
Ok(res)
}
}
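/// `PreparedVerifierKey` is the prepared form of `VerifierKey`, used to check evaluation proofs.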
#[derive(Clone, Debug)]
pub struct PreparedVerifierKey<E: PairingEngine> {
pub prepared_vk: kzg10::PreparedVerifierKey<E>,
pub degree_bounds_and_prepared_neg_powers_of_h: Option<Vec<(usize, <E::G2Affine as PairingCurve>::Prepared)>>,
pub max_degree: usize,
pub supported_degree: usize,
}
impl<E: PairingEngine> PreparedVerifierKey<E> {
pub fn get_prepared_shift_power(&self, bound: usize) -> Option<<E::G2Affine as PairingCurve>::Prepared> {
self.degree_bounds_and_prepared_neg_powers_of_h
.as_ref()
.and_then(|v| v.binary_search_by(|(d, _)| d.cmp(&bound)).ok().map(|i| v[i].1.clone()))
}
}
impl<E: PairingEngine> Prepare for VerifierKey<E> {
type Prepared = PreparedVerifierKey<E>;
fn prepare(&self) -> PreparedVerifierKey<E> {
let prepared_vk = kzg10::PreparedVerifierKey::<E>::prepare(&self.vk);
PreparedVerifierKey::<E> {
prepared_vk,
degree_bounds_and_prepared_neg_powers_of_h: self.degree_bounds_and_prepared_neg_powers_of_h.clone(),
max_degree: self.max_degree,
supported_degree: self.supported_degree,
}
}
}
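/// `BatchProof` is a collection of KZG10 evaluation proofs, used when opening at multiple query points.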
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, CanonicalSerialize, CanonicalDeserialize)]
pub struct BatchProof<E: PairingEngine>(pub(crate) Vec<kzg10::KZGProof<E>>);
impl<E: PairingEngine> BatchProof<E> {
pub fn is_hiding(&self) -> bool {
self.0.iter().any(|c| c.is_hiding())
}
}
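/// A human-readable label identifying a polynomial or its commitment.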
pub type PolynomialLabel = String;
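/// A commitment, along with the label and optional degree bound of the polynomial it commits to.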
#[derive(Clone, Debug, CanonicalSerialize, PartialEq, Eq)]
pub struct LabeledCommitment<C: CanonicalSerialize + 'static> {
label: PolynomialLabel,
commitment: C,
degree_bound: Option<usize>,
}
impl<F: Field, C: CanonicalSerialize + ToConstraintField<F>> ToConstraintField<F> for LabeledCommitment<C> {
fn to_field_elements(&self) -> Result<Vec<F>, ConstraintFieldError> {
self.commitment.to_field_elements()
}
}
impl<C: CanonicalSerialize + ToBytes> ToBytes for LabeledCommitment<C> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
CanonicalSerialize::serialize_compressed(&self.commitment, &mut writer)
.map_err(|_| error("could not serialize struct"))
}
}
impl<C: CanonicalSerialize> LabeledCommitment<C> {
pub fn new(label: PolynomialLabel, commitment: C, degree_bound: Option<usize>) -> Self {
Self { label, commitment, degree_bound }
}
pub fn new_with_info(info: &PolynomialInfo, commitment: C) -> Self {
Self { label: info.label().to_string(), commitment, degree_bound: info.degree_bound() }
}
pub fn label(&self) -> &str {
&self.label
}
pub fn commitment(&self) -> &C {
&self.commitment
}
pub fn degree_bound(&self) -> Option<usize> {
self.degree_bound
}
}
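/// A term in a linear combination: either the constant `1` or a labeled polynomial.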
#[derive(Hash, Ord, PartialOrd, Clone, Eq, PartialEq)]
pub enum LCTerm {
One,
PolyLabel(String),
}
impl fmt::Debug for LCTerm {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
LCTerm::One => write!(f, "1"),
LCTerm::PolyLabel(label) => write!(f, "{label}"),
}
}
}
impl LCTerm {
#[inline]
pub fn is_one(&self) -> bool {
matches!(self, LCTerm::One)
}
}
impl From<PolynomialLabel> for LCTerm {
fn from(other: PolynomialLabel) -> Self {
Self::PolyLabel(other)
}
}
impl<'a> From<&'a str> for LCTerm {
fn from(other: &str) -> Self {
Self::PolyLabel(other.into())
}
}
impl core::convert::TryInto<PolynomialLabel> for LCTerm {
type Error = ();
fn try_into(self) -> Result<PolynomialLabel, ()> {
match self {
Self::One => Err(()),
Self::PolyLabel(l) => Ok(l),
}
}
}
impl<'a> core::convert::TryInto<&'a PolynomialLabel> for &'a LCTerm {
type Error = ();
fn try_into(self) -> Result<&'a PolynomialLabel, ()> {
match self {
LCTerm::One => Err(()),
LCTerm::PolyLabel(l) => Ok(l),
}
}
}
impl<B: Borrow<String>> PartialEq<B> for LCTerm {
fn eq(&self, other: &B) -> bool {
match self {
Self::One => false,
Self::PolyLabel(l) => l == other.borrow(),
}
}
}
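/// A labeled linear combination of labeled polynomials, stored as a map from term to coefficient.
///
/// A minimal usage sketch (illustrative only; `F` stands in for any concrete `Field` type):
/// ```ignore
/// let mut lc = LinearCombination::<F>::empty("lc");
/// lc.add(F::one(), "w");           // + 1 * w
/// lc.add(-F::one(), LCTerm::One);  // - 1
/// assert_eq!(lc.len(), 2);
/// ```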
#[derive(Clone, Debug)]
pub struct LinearCombination<F> {
pub label: String,
pub terms: BTreeMap<LCTerm, F>,
}
#[allow(clippy::or_fun_call)]
impl<F: Field> LinearCombination<F> {
pub fn empty(label: impl Into<String>) -> Self {
Self { label: label.into(), terms: BTreeMap::new() }
}
pub fn new(label: impl Into<String>, _terms: impl IntoIterator<Item = (F, impl Into<LCTerm>)>) -> Self {
let mut terms = BTreeMap::new();
for (c, l) in _terms.into_iter().map(|(c, t)| (c, t.into())) {
*terms.entry(l).or_insert(F::zero()) += c;
}
Self { label: label.into(), terms }
}
pub fn label(&self) -> &str {
&self.label
}
pub fn is_empty(&self) -> bool {
self.terms.is_empty()
}
pub fn add(&mut self, c: F, t: impl Into<LCTerm>) -> &mut Self {
let t = t.into();
*self.terms.entry(t.clone()).or_insert(F::zero()) += c;
if self.terms[&t].is_zero() {
self.terms.remove(&t);
}
self
}
pub fn len(&self) -> usize {
self.terms.len()
}
pub fn iter(&self) -> impl Iterator<Item = (&F, &LCTerm)> {
self.terms.iter().map(|(t, c)| (c, t))
}
}
impl<'a, F: Field> AddAssign<(F, &'a LinearCombination<F>)> for LinearCombination<F> {
#[allow(clippy::suspicious_op_assign_impl)]
fn add_assign(&mut self, (coeff, other): (F, &'a LinearCombination<F>)) {
for (t, c) in other.terms.iter() {
self.add(coeff * c, t.clone());
}
}
}
impl<'a, F: Field> SubAssign<(F, &'a LinearCombination<F>)> for LinearCombination<F> {
#[allow(clippy::suspicious_op_assign_impl)]
fn sub_assign(&mut self, (coeff, other): (F, &'a LinearCombination<F>)) {
for (t, c) in other.terms.iter() {
self.add(-coeff * c, t.clone());
}
}
}
impl<'a, F: Field> AddAssign<&'a LinearCombination<F>> for LinearCombination<F> {
fn add_assign(&mut self, other: &'a LinearCombination<F>) {
for (t, c) in other.terms.iter() {
self.add(*c, t.clone());
}
}
}
impl<'a, F: Field> SubAssign<&'a LinearCombination<F>> for LinearCombination<F> {
fn sub_assign(&mut self, other: &'a LinearCombination<F>) {
for (t, &c) in other.terms.iter() {
self.add(-c, t.clone());
}
}
}
impl<F: Field> AddAssign<F> for LinearCombination<F> {
fn add_assign(&mut self, coeff: F) {
self.add(coeff, LCTerm::One);
}
}
impl<F: Field> SubAssign<F> for LinearCombination<F> {
fn sub_assign(&mut self, coeff: F) {
self.add(-coeff, LCTerm::One);
}
}
impl<F: Field> MulAssign<F> for LinearCombination<F> {
fn mul_assign(&mut self, coeff: F) {
self.terms.values_mut().for_each(|c| *c *= &coeff);
}
}
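/// `QuerySet` is the set of queries made to a set of labeled polynomials. Each element is a
/// pair `(poly_label, (point_name, point))`, asking for the evaluation of the polynomial
/// labeled `poly_label` at `point`.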
pub type QuerySet<T> = BTreeSet<(String, (String, T))>;
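/// `Evaluations` maps each `(poly_label, point)` pair of a `QuerySet` to the evaluation of
/// that polynomial at that point.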
pub type Evaluations<F> = BTreeMap<(String, F), F>;
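/// Evaluates each labeled polynomial at every point it is queried at in `query_set`.
///
/// A minimal sketch (illustrative only; `polys: Vec<LabeledPolynomial<F>>`, the label `"f"`, and
/// the point `beta` are assumed to exist):
/// ```ignore
/// let mut query_set = QuerySet::new();
/// query_set.insert(("f".to_string(), ("beta".to_string(), beta)));
/// let evaluations = evaluate_query_set(polys.iter(), &query_set);
/// let eval_of_f_at_beta = evaluations[&("f".to_string(), beta)];
/// ```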
pub fn evaluate_query_set<'a, F: PrimeField>(
polys: impl IntoIterator<Item = &'a LabeledPolynomial<F>>,
query_set: &QuerySet<F>,
) -> Evaluations<F> {
let polys: HashMap<_, _> = polys.into_iter().map(|p| (p.label(), p)).collect();
let mut evaluations = Evaluations::new();
for (label, (_point_name, point)) in query_set {
let poly = polys.get(label as &str).expect("polynomial in evaluated lc is not found");
let eval = poly.evaluate(*point);
evaluations.insert((label.clone(), *point), eval);
}
evaluations
}
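/// `BatchLCProof` is a batch proof for openings of linear combinations, optionally carrying the
/// claimed evaluations.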
#[derive(Clone, Debug, PartialEq, Eq, CanonicalSerialize, CanonicalDeserialize)]
pub struct BatchLCProof<E: PairingEngine> {
pub proof: BatchProof<E>,
pub evaluations: Option<Vec<E::Fr>>,
}
impl<E: PairingEngine> BatchLCProof<E> {
pub fn is_hiding(&self) -> bool {
self.proof.is_hiding()
}
}
impl<E: PairingEngine> FromBytes for BatchLCProof<E> {
fn read_le<R: Read>(mut reader: R) -> io::Result<Self> {
CanonicalDeserialize::deserialize_compressed(&mut reader).map_err(|_| error("could not deserialize struct"))
}
}
impl<E: PairingEngine> ToBytes for BatchLCProof<E> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
CanonicalSerialize::serialize_compressed(self, &mut writer).map_err(|_| error("could not serialize struct"))
}
}