use super::{LabeledPolynomial, PolynomialInfo};
use crate::{crypto_hash::sha256::sha256, fft::EvaluationDomain, polycommit::kzg10};
use snarkvm_curves::PairingEngine;
use snarkvm_fields::{ConstraintFieldError, Field, PrimeField, ToConstraintField};
use snarkvm_utilities::{FromBytes, ToBytes, error, serialize::*};
use hashbrown::HashMap;
use std::{
borrow::{Borrow, Cow},
collections::{BTreeMap, BTreeSet},
fmt,
ops::{AddAssign, MulAssign, SubAssign},
};
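
/// `UniversalParams` are the universal parameters for the KZG10 scheme.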
pub type UniversalParams<E> = kzg10::UniversalParams<E>;
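
/// `Randomness` is the hiding randomness used by the KZG10 commitment scheme.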
pub type Randomness<E> = kzg10::KZGRandomness<E>;
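
/// `Commitment` is a KZG10 commitment to a polynomial.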
pub type Commitment<E> = kzg10::KZGCommitment<E>;

/// `CommitterKey` is used to commit to, and create evaluation proofs for, a given polynomial.
#[derive(Debug)]
pub struct CommitterKey<E: PairingEngine> {
    /// The key used to commit to polynomials.
    pub powers_of_beta_g: Vec<E::G1Affine>,
    /// Lagrange-basis versions of `powers_of_beta_g`, keyed by evaluation-domain size.
    pub lagrange_bases_at_beta_g: BTreeMap<usize, Vec<E::G1Affine>>,
    /// The key used to commit to hiding polynomials.
    pub powers_of_beta_times_gamma_g: Vec<E::G1Affine>,
    /// The powers used to commit to shifted polynomials.
    /// This is `None` if degree bounds are not supported.
    pub shifted_powers_of_beta_g: Option<Vec<E::G1Affine>>,
    /// The powers used to commit to shifted hiding polynomials, keyed by degree bound.
    /// This is `None` if degree bounds are not supported.
    pub shifted_powers_of_beta_times_gamma_g: Option<BTreeMap<usize, Vec<E::G1Affine>>>,
    /// The degree bounds supported by this key, sorted in ascending order.
    /// This is `None` if degree bounds are not supported.
    pub enforced_degree_bounds: Option<Vec<usize>>,
}
impl<E: PairingEngine> FromBytes for CommitterKey<E> {
fn read_le<R: Read>(mut reader: R) -> io::Result<Self> {
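        // Deserialize `powers_of_beta_g`.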
let powers_len: u32 = FromBytes::read_le(&mut reader)?;
let mut powers_of_beta_g = Vec::with_capacity(powers_len as usize);
for _ in 0..powers_len {
let power: E::G1Affine = FromBytes::read_le(&mut reader)?;
powers_of_beta_g.push(power);
}
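
        // Deserialize `lagrange_bases_at_beta_g`.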
let lagrange_bases_at_beta_len: u32 = FromBytes::read_le(&mut reader)?;
let mut lagrange_bases_at_beta_g = BTreeMap::new();
for _ in 0..lagrange_bases_at_beta_len {
let size: u32 = FromBytes::read_le(&mut reader)?;
let mut basis = Vec::with_capacity(size as usize);
for _ in 0..size {
let power: E::G1Affine = FromBytes::read_le(&mut reader)?;
basis.push(power);
}
lagrange_bases_at_beta_g.insert(size as usize, basis);
}
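
        // Deserialize `powers_of_beta_times_gamma_g`.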
let powers_of_beta_times_gamma_g_len: u32 = FromBytes::read_le(&mut reader)?;
let mut powers_of_beta_times_gamma_g = Vec::with_capacity(powers_of_beta_times_gamma_g_len as usize);
for _ in 0..powers_of_beta_times_gamma_g_len {
let powers_of_g: E::G1Affine = FromBytes::read_le(&mut reader)?;
powers_of_beta_times_gamma_g.push(powers_of_g);
}
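
        // Deserialize `shifted_powers_of_beta_g`, if present.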
let has_shifted_powers_of_beta_g: bool = FromBytes::read_le(&mut reader)?;
let shifted_powers_of_beta_g = match has_shifted_powers_of_beta_g {
true => {
let shifted_powers_len: u32 = FromBytes::read_le(&mut reader)?;
let mut shifted_powers_of_beta_g = Vec::with_capacity(shifted_powers_len as usize);
for _ in 0..shifted_powers_len {
let shifted_power: E::G1Affine = FromBytes::read_le(&mut reader)?;
shifted_powers_of_beta_g.push(shifted_power);
}
Some(shifted_powers_of_beta_g)
}
false => None,
};
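
        // Deserialize `shifted_powers_of_beta_times_gamma_g`, if present.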
let has_shifted_powers_of_beta_times_gamma_g: bool = FromBytes::read_le(&mut reader)?;
let shifted_powers_of_beta_times_gamma_g = match has_shifted_powers_of_beta_times_gamma_g {
true => {
let mut shifted_powers_of_beta_times_gamma_g = BTreeMap::new();
let shifted_powers_of_beta_times_gamma_g_num_elements: u32 = FromBytes::read_le(&mut reader)?;
for _ in 0..shifted_powers_of_beta_times_gamma_g_num_elements {
let key: u32 = FromBytes::read_le(&mut reader)?;
let value_len: u32 = FromBytes::read_le(&mut reader)?;
let mut value = Vec::with_capacity(value_len as usize);
for _ in 0..value_len {
let val: E::G1Affine = FromBytes::read_le(&mut reader)?;
value.push(val);
}
shifted_powers_of_beta_times_gamma_g.insert(key as usize, value);
}
Some(shifted_powers_of_beta_times_gamma_g)
}
false => None,
};
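
        // Deserialize `enforced_degree_bounds`, if present.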
let has_enforced_degree_bounds: bool = FromBytes::read_le(&mut reader)?;
let enforced_degree_bounds = match has_enforced_degree_bounds {
true => {
let enforced_degree_bounds_len: u32 = FromBytes::read_le(&mut reader)?;
let mut enforced_degree_bounds = Vec::with_capacity(enforced_degree_bounds_len as usize);
for _ in 0..enforced_degree_bounds_len {
let enforced_degree_bound: u32 = FromBytes::read_le(&mut reader)?;
enforced_degree_bounds.push(enforced_degree_bound as usize);
}
Some(enforced_degree_bounds)
}
false => None,
};
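
        // Recompute the hash of the group elements and check it against the hash appended during serialization.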
let mut hash_input = powers_of_beta_g.to_bytes_le().map_err(|_| error("Could not serialize powers"))?;
powers_of_beta_times_gamma_g
.write_le(&mut hash_input)
.map_err(|_| error("Could not serialize powers_of_beta_times_gamma_g"))?;
if let Some(shifted_powers_of_beta_g) = &shifted_powers_of_beta_g {
shifted_powers_of_beta_g
.write_le(&mut hash_input)
.map_err(|_| error("Could not serialize shifted_powers_of_beta_g"))?;
}
if let Some(shifted_powers_of_beta_times_gamma_g) = &shifted_powers_of_beta_times_gamma_g {
for value in shifted_powers_of_beta_times_gamma_g.values() {
value.write_le(&mut hash_input).map_err(|_| error("Could not serialize shifted_power_of_gamma_g"))?;
}
}
let hash = sha256(&hash_input);
let expected_hash: [u8; 32] = FromBytes::read_le(&mut reader)?;
if expected_hash != hash {
return Err(error("Mismatching group elements"));
}
Ok(Self {
powers_of_beta_g,
lagrange_bases_at_beta_g,
powers_of_beta_times_gamma_g,
shifted_powers_of_beta_g,
shifted_powers_of_beta_times_gamma_g,
enforced_degree_bounds,
})
}
}
impl<E: PairingEngine> ToBytes for CommitterKey<E> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
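        // Serialize `powers_of_beta_g` (length-prefixed).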
(self.powers_of_beta_g.len() as u32).write_le(&mut writer)?;
for power in &self.powers_of_beta_g {
power.write_le(&mut writer)?;
}
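
        // Serialize `lagrange_bases_at_beta_g`.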
(self.lagrange_bases_at_beta_g.len() as u32).write_le(&mut writer)?;
for (size, powers) in &self.lagrange_bases_at_beta_g {
(*size as u32).write_le(&mut writer)?;
for power in powers {
power.write_le(&mut writer)?;
}
}
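
        // Serialize `powers_of_beta_times_gamma_g`.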
(self.powers_of_beta_times_gamma_g.len() as u32).write_le(&mut writer)?;
for power_of_gamma_g in &self.powers_of_beta_times_gamma_g {
power_of_gamma_g.write_le(&mut writer)?;
}
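
        // Serialize `shifted_powers_of_beta_g`, if present.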
self.shifted_powers_of_beta_g.is_some().write_le(&mut writer)?;
if let Some(shifted_powers_of_beta_g) = &self.shifted_powers_of_beta_g {
(shifted_powers_of_beta_g.len() as u32).write_le(&mut writer)?;
for shifted_power in shifted_powers_of_beta_g {
shifted_power.write_le(&mut writer)?;
}
}
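
        // Serialize `shifted_powers_of_beta_times_gamma_g`, if present.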
self.shifted_powers_of_beta_times_gamma_g.is_some().write_le(&mut writer)?;
if let Some(shifted_powers_of_beta_times_gamma_g) = &self.shifted_powers_of_beta_times_gamma_g {
(shifted_powers_of_beta_times_gamma_g.len() as u32).write_le(&mut writer)?;
for (key, shifted_powers_of_beta_g) in shifted_powers_of_beta_times_gamma_g {
(*key as u32).write_le(&mut writer)?;
(shifted_powers_of_beta_g.len() as u32).write_le(&mut writer)?;
for shifted_power in shifted_powers_of_beta_g {
shifted_power.write_le(&mut writer)?;
}
}
}
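
        // Serialize `enforced_degree_bounds`, if present.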
self.enforced_degree_bounds.is_some().write_le(&mut writer)?;
if let Some(enforced_degree_bounds) = &self.enforced_degree_bounds {
(enforced_degree_bounds.len() as u32).write_le(&mut writer)?;
for enforced_degree_bound in enforced_degree_bounds {
(*enforced_degree_bound as u32).write_le(&mut writer)?;
}
}
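
        // Hash the group elements and append the hash, so that deserialization can verify integrity.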
let mut hash_input = self.powers_of_beta_g.to_bytes_le().map_err(|_| error("Could not serialize powers"))?;
self.powers_of_beta_times_gamma_g
.write_le(&mut hash_input)
.map_err(|_| error("Could not serialize powers_of_beta_times_gamma_g"))?;
if let Some(shifted_powers_of_beta_g) = &self.shifted_powers_of_beta_g {
shifted_powers_of_beta_g
.write_le(&mut hash_input)
.map_err(|_| error("Could not serialize shifted_powers_of_beta_g"))?;
}
if let Some(shifted_powers_of_beta_times_gamma_g) = &self.shifted_powers_of_beta_times_gamma_g {
for value in shifted_powers_of_beta_times_gamma_g.values() {
value.write_le(&mut hash_input).map_err(|_| error("Could not serialize shifted_power_of_gamma_g"))?;
}
}
let hash = sha256(&hash_input);
hash.write_le(&mut writer)
}
}
impl<E: PairingEngine> CommitterKey<E> {
    /// Returns the number of shifted powers of beta (0 if this key does not support degree bounds).
    /// Used by `CommitterUnionKey::union` to select the largest key.
    fn len(&self) -> usize {
        self.shifted_powers_of_beta_g.as_ref().map_or(0, |powers| powers.len())
    }
}
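
/// `CommitterUnionKey` borrows the components of one or more `CommitterKey`s so that several keys
/// can be used together as a single committer key.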
#[derive(Debug)]
pub struct CommitterUnionKey<'a, E: PairingEngine> {
pub powers_of_beta_g: Option<&'a Vec<E::G1Affine>>,
pub lagrange_bases_at_beta_g: BTreeMap<usize, &'a Vec<E::G1Affine>>,
pub powers_of_beta_times_gamma_g: Option<&'a Vec<E::G1Affine>>,
pub shifted_powers_of_beta_g: Option<&'a Vec<E::G1Affine>>,
pub shifted_powers_of_beta_times_gamma_g: Option<BTreeMap<usize, &'a Vec<E::G1Affine>>>,
pub enforced_degree_bounds: Option<Vec<usize>>,
}
impl<'a, E: PairingEngine> CommitterUnionKey<'a, E> {
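    /// Obtain the powers for the underlying KZG10 construction.
    /// Panics if `powers_of_beta_g` or `powers_of_beta_times_gamma_g` is `None`.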
pub fn powers(&self) -> kzg10::Powers<E> {
kzg10::Powers {
powers_of_beta_g: self.powers_of_beta_g.unwrap().as_slice().into(),
powers_of_beta_times_gamma_g: self.powers_of_beta_times_gamma_g.unwrap().as_slice().into(),
}
}
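
    /// Obtain the powers for committing to shifted polynomials, sliced for the given degree bound
    /// (or for the largest enforced bound when `degree_bound` is `None`).
    /// Returns `None` if this key does not contain shifted powers.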
pub fn shifted_powers_of_beta_g(&self, degree_bound: impl Into<Option<usize>>) -> Option<kzg10::Powers<E>> {
match (&self.shifted_powers_of_beta_g, &self.shifted_powers_of_beta_times_gamma_g) {
(Some(shifted_powers_of_beta_g), Some(shifted_powers_of_beta_times_gamma_g)) => {
let max_bound = self.enforced_degree_bounds.as_ref().unwrap().last().unwrap();
let (bound, powers_range) = if let Some(degree_bound) = degree_bound.into() {
                    assert!(self.enforced_degree_bounds.as_ref().unwrap().contains(&degree_bound));
(degree_bound, (max_bound - degree_bound)..)
} else {
(*max_bound, 0..)
};
let ck = kzg10::Powers {
powers_of_beta_g: shifted_powers_of_beta_g[powers_range].into(),
powers_of_beta_times_gamma_g: shifted_powers_of_beta_times_gamma_g[&bound].clone().into(),
};
Some(ck)
}
(_, _) => None,
}
}
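
    /// Obtain the Lagrange basis elements of the SRS for the size of the given evaluation domain,
    /// if such a basis is available in this key.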
pub fn lagrange_basis(&self, domain: EvaluationDomain<E::Fr>) -> Option<kzg10::LagrangeBasis<E>> {
self.lagrange_bases_at_beta_g.get(&domain.size()).map(|basis| kzg10::LagrangeBasis {
lagrange_basis_at_beta_g: Cow::Borrowed(basis),
powers_of_beta_times_gamma_g: Cow::Borrowed(self.powers_of_beta_times_gamma_g.unwrap()),
domain,
})
}
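
    /// Construct a union of the given `CommitterKey`s: the powers of beta are borrowed from the key
    /// with the most shifted powers, while the Lagrange bases, shifted powers, and enforced degree
    /// bounds are merged across all keys. Panics if `committer_keys` is empty.
    ///
    /// A minimal usage sketch (`ck_a` and `ck_b` are hypothetical existing `CommitterKey`s):
    /// ```ignore
    /// let ck_union = CommitterUnionKey::union([&ck_a, &ck_b]);
    /// ```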
pub fn union<T: IntoIterator<Item = &'a CommitterKey<E>>>(committer_keys: T) -> Self {
let mut ck_union = CommitterUnionKey::<E> {
powers_of_beta_g: None,
lagrange_bases_at_beta_g: BTreeMap::new(),
powers_of_beta_times_gamma_g: None,
shifted_powers_of_beta_g: None,
shifted_powers_of_beta_times_gamma_g: None,
enforced_degree_bounds: None,
};
let mut enforced_degree_bounds = vec![];
let mut biggest_ck: Option<&CommitterKey<E>> = None;
let mut shifted_powers_of_beta_times_gamma_g = BTreeMap::new();
for ck in committer_keys {
if biggest_ck.is_none() || biggest_ck.unwrap().len() < ck.len() {
biggest_ck = Some(ck);
}
let lagrange_bases = &ck.lagrange_bases_at_beta_g;
for (bound_base, bases) in lagrange_bases.iter() {
ck_union.lagrange_bases_at_beta_g.entry(*bound_base).or_insert(bases);
}
if let Some(shifted_powers) = ck.shifted_powers_of_beta_times_gamma_g.as_ref() {
for (bound_power, powers) in shifted_powers.iter() {
shifted_powers_of_beta_times_gamma_g.entry(*bound_power).or_insert(powers);
}
}
if let Some(degree_bounds) = &ck.enforced_degree_bounds {
enforced_degree_bounds.append(&mut degree_bounds.clone());
}
}
let biggest_ck = biggest_ck.unwrap();
ck_union.powers_of_beta_g = Some(&biggest_ck.powers_of_beta_g);
ck_union.powers_of_beta_times_gamma_g = Some(&biggest_ck.powers_of_beta_times_gamma_g);
ck_union.shifted_powers_of_beta_g = biggest_ck.shifted_powers_of_beta_g.as_ref();
if !enforced_degree_bounds.is_empty() {
enforced_degree_bounds.sort();
enforced_degree_bounds.dedup();
ck_union.enforced_degree_bounds = Some(enforced_degree_bounds);
ck_union.shifted_powers_of_beta_times_gamma_g = Some(shifted_powers_of_beta_times_gamma_g);
}
ck_union
}
}
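
/// A collection of KZG10 evaluation proofs.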
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, CanonicalSerialize, CanonicalDeserialize)]
pub struct BatchProof<E: PairingEngine>(pub(crate) Vec<kzg10::KZGProof<E>>);
impl<E: PairingEngine> BatchProof<E> {
pub fn is_hiding(&self) -> bool {
self.0.iter().any(|c| c.is_hiding())
}
}
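
/// The label of a polynomial, used to identify it in commitments, linear combinations, and query sets.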
pub type PolynomialLabel = String;
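
/// A commitment, together with the label and optional degree bound of the polynomial it commits to.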
#[derive(Clone, Debug, CanonicalSerialize, PartialEq, Eq)]
pub struct LabeledCommitment<C: CanonicalSerialize + 'static> {
label: PolynomialLabel,
commitment: C,
degree_bound: Option<usize>,
}
impl<F: Field, C: CanonicalSerialize + ToConstraintField<F>> ToConstraintField<F> for LabeledCommitment<C> {
fn to_field_elements(&self) -> Result<Vec<F>, ConstraintFieldError> {
self.commitment.to_field_elements()
}
}
impl<C: CanonicalSerialize + ToBytes> ToBytes for LabeledCommitment<C> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
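        // Note: only the commitment is written to `writer`; the label and degree bound are not serialized.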
CanonicalSerialize::serialize_compressed(&self.commitment, &mut writer)
.map_err(|_| error("could not serialize struct"))
}
}
impl<C: CanonicalSerialize> LabeledCommitment<C> {
pub fn new(label: PolynomialLabel, commitment: C, degree_bound: Option<usize>) -> Self {
Self { label, commitment, degree_bound }
}
pub fn new_with_info(info: &PolynomialInfo, commitment: C) -> Self {
Self { label: info.label().to_string(), commitment, degree_bound: info.degree_bound() }
}
pub fn label(&self) -> &str {
&self.label
}
pub fn commitment(&self) -> &C {
&self.commitment
}
pub fn degree_bound(&self) -> Option<usize> {
self.degree_bound
}
}
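
/// A term in a linear combination: either the constant one or a labeled polynomial.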
#[derive(Hash, Ord, PartialOrd, Clone, Eq, PartialEq)]
pub enum LCTerm {
One,
PolyLabel(String),
}
impl fmt::Debug for LCTerm {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
LCTerm::One => write!(f, "1"),
LCTerm::PolyLabel(label) => write!(f, "{label}"),
}
}
}
impl LCTerm {
#[inline]
pub fn is_one(&self) -> bool {
matches!(self, LCTerm::One)
}
}
impl From<PolynomialLabel> for LCTerm {
fn from(other: PolynomialLabel) -> Self {
Self::PolyLabel(other)
}
}
impl<'a> From<&'a str> for LCTerm {
fn from(other: &str) -> Self {
Self::PolyLabel(other.into())
}
}
impl core::convert::TryInto<PolynomialLabel> for LCTerm {
type Error = ();
fn try_into(self) -> Result<PolynomialLabel, ()> {
match self {
Self::One => Err(()),
Self::PolyLabel(l) => Ok(l),
}
}
}
impl<'a> core::convert::TryInto<&'a PolynomialLabel> for &'a LCTerm {
type Error = ();
fn try_into(self) -> Result<&'a PolynomialLabel, ()> {
match self {
LCTerm::One => Err(()),
LCTerm::PolyLabel(l) => Ok(l),
}
}
}
impl<B: Borrow<String>> PartialEq<B> for LCTerm {
fn eq(&self, other: &B) -> bool {
match self {
Self::One => false,
Self::PolyLabel(l) => l == other.borrow(),
}
}
}
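
/// A labeled linear combination of polynomials, stored as a map from term to coefficient.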
#[derive(Clone, Debug)]
pub struct LinearCombination<F> {
pub label: String,
pub terms: BTreeMap<LCTerm, F>,
}
#[allow(clippy::or_fun_call)]
impl<F: Field> LinearCombination<F> {
pub fn empty(label: impl Into<String>) -> Self {
Self { label: label.into(), terms: BTreeMap::new() }
}
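
    /// Construct a new labeled linear combination from `(coefficient, term)` pairs.
    /// Coefficients of terms that appear more than once are summed.
    ///
    /// A minimal usage sketch, assuming some concrete field type `F: Field`:
    /// ```ignore
    /// // Builds lc = 2·"a" + 1: the two "a" entries are merged, and the constant term
    /// // is then added via `AddAssign<F>`.
    /// let mut lc = LinearCombination::new("example", [(F::one(), "a"), (F::one(), "a")]);
    /// lc += F::one();
    /// assert_eq!(lc.len(), 2);
    /// ```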
pub fn new(label: impl Into<String>, _terms: impl IntoIterator<Item = (F, impl Into<LCTerm>)>) -> Self {
let mut terms = BTreeMap::new();
for (c, l) in _terms.into_iter().map(|(c, t)| (c, t.into())) {
*terms.entry(l).or_insert(F::zero()) += c;
}
Self { label: label.into(), terms }
}
pub fn label(&self) -> &str {
&self.label
}
pub fn is_empty(&self) -> bool {
self.terms.is_empty()
}
pub fn add(&mut self, c: F, t: impl Into<LCTerm>) -> &mut Self {
let t = t.into();
*self.terms.entry(t.clone()).or_insert(F::zero()) += c;
if self.terms[&t].is_zero() {
self.terms.remove(&t);
}
self
}
pub fn len(&self) -> usize {
self.terms.len()
}
pub fn iter(&self) -> impl Iterator<Item = (&F, &LCTerm)> {
self.terms.iter().map(|(t, c)| (c, t))
}
}
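
// Arithmetic on linear combinations: add or subtract another combination (optionally scaled by a
// coefficient), add or subtract a constant term, and scale every coefficient by a field element.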
impl<'a, F: Field> AddAssign<(F, &'a LinearCombination<F>)> for LinearCombination<F> {
#[allow(clippy::suspicious_op_assign_impl)]
fn add_assign(&mut self, (coeff, other): (F, &'a LinearCombination<F>)) {
for (t, c) in other.terms.iter() {
self.add(coeff * c, t.clone());
}
}
}
impl<'a, F: Field> SubAssign<(F, &'a LinearCombination<F>)> for LinearCombination<F> {
#[allow(clippy::suspicious_op_assign_impl)]
fn sub_assign(&mut self, (coeff, other): (F, &'a LinearCombination<F>)) {
for (t, c) in other.terms.iter() {
self.add(-coeff * c, t.clone());
}
}
}
impl<'a, F: Field> AddAssign<&'a LinearCombination<F>> for LinearCombination<F> {
fn add_assign(&mut self, other: &'a LinearCombination<F>) {
for (t, c) in other.terms.iter() {
self.add(*c, t.clone());
}
}
}
impl<'a, F: Field> SubAssign<&'a LinearCombination<F>> for LinearCombination<F> {
fn sub_assign(&mut self, other: &'a LinearCombination<F>) {
for (t, &c) in other.terms.iter() {
self.add(-c, t.clone());
}
}
}
impl<F: Field> AddAssign<F> for LinearCombination<F> {
fn add_assign(&mut self, coeff: F) {
self.add(coeff, LCTerm::One);
}
}
impl<F: Field> SubAssign<F> for LinearCombination<F> {
fn sub_assign(&mut self, coeff: F) {
self.add(-coeff, LCTerm::One);
}
}
impl<F: Field> MulAssign<F> for LinearCombination<F> {
fn mul_assign(&mut self, coeff: F) {
self.terms.values_mut().for_each(|c| *c *= &coeff);
}
}
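
/// `QuerySet` is the set of queries to be made to a set of labeled polynomials. Each element is a
/// `(poly_label, (point_name, point))` pair: the polynomial labeled `poly_label` is to be queried at `point`.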
pub type QuerySet<T> = BTreeSet<(String, (String, T))>;
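
/// `Evaluations` is the result of querying labeled polynomials at a `QuerySet`: it maps each
/// `(poly_label, point)` pair to the evaluation of that polynomial at that point.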
pub type Evaluations<F> = BTreeMap<(String, F), F>;
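
/// Evaluate each polynomial in `polys` at the points that `query_set` associates with its label.
/// Panics if a label in the query set has no matching polynomial.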
pub fn evaluate_query_set<'a, F: PrimeField>(
polys: impl IntoIterator<Item = &'a LabeledPolynomial<F>>,
query_set: &QuerySet<F>,
) -> Evaluations<F> {
let polys: HashMap<_, _> = polys.into_iter().map(|p| (p.label(), p)).collect();
let mut evaluations = Evaluations::new();
for (label, (_point_name, point)) in query_set {
        let poly = polys.get(label.as_str()).expect("polynomial in evaluated linear combination not found");
let eval = poly.evaluate(*point);
evaluations.insert((label.clone(), *point), eval);
}
evaluations
}
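
/// A proof of evaluations of linear combinations of committed polynomials; wraps a [`BatchProof`].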
#[derive(Clone, Debug, PartialEq, Eq, CanonicalSerialize, CanonicalDeserialize)]
pub struct BatchLCProof<E: PairingEngine> {
pub proof: BatchProof<E>,
}
impl<E: PairingEngine> BatchLCProof<E> {
pub fn is_hiding(&self) -> bool {
self.proof.is_hiding()
}
}
impl<E: PairingEngine> FromBytes for BatchLCProof<E> {
fn read_le<R: Read>(mut reader: R) -> io::Result<Self> {
CanonicalDeserialize::deserialize_compressed(&mut reader).map_err(|_| error("could not deserialize struct"))
}
}
impl<E: PairingEngine> ToBytes for BatchLCProof<E> {
fn write_le<W: Write>(&self, mut writer: W) -> io::Result<()> {
CanonicalSerialize::serialize_compressed(self, &mut writer).map_err(|_| error("could not serialize struct"))
}
}