rust_kzg_bn254_verifier/batch.rs
use ark_bn254::{Fr, G1Affine, G1Projective, G2Affine, G2Projective};
use ark_ec::{AffineRepr, CurveGroup};
use ark_ff::{BigInteger, PrimeField};
use ark_serialize::CanonicalSerialize;
use rust_kzg_bn254_primitives::{
blob::Blob,
consts::{BYTES_PER_FIELD_ELEMENT, G2_TAU, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN},
errors::KzgError,
helpers::{self, is_on_curve_g1},
};
/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_blob_kzg_proof_batch
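/// # Example (illustrative sketch)
///
/// The `Blob` constructor and the origin of `commitments`/`proofs` below are assumptions about
/// the companion crates rather than verified calls, so the block is marked `ignore`:
///
/// ```ignore
/// use rust_kzg_bn254_primitives::blob::Blob;
///
/// // Blobs paired with commitments and proofs produced by the matching prover, one per blob.
/// let blobs = vec![Blob::from_raw_data(b"some rollup data")];
/// let ok = verify_blob_kzg_proof_batch(&blobs, &commitments, &proofs)?;
/// assert!(ok);
/// ```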
pub fn verify_blob_kzg_proof_batch(
blobs: &[Blob],
commitments: &[G1Affine],
proofs: &[G1Affine],
) -> Result<bool, KzgError> {
// First validation check: Ensure all input vectors have matching lengths
// This is critical for batch verification to work correctly
if !(commitments.len() == blobs.len() && proofs.len() == blobs.len()) {
return Err(KzgError::GenericError(
"length's of the input are not the same".to_owned(),
));
}
    // Validate that every commitment is a valid, non-identity point on G1:
    // on the curve and in the correct subgroup; `any` short-circuits on the
    // first invalid point. This prevents invalid-curve attacks.
if commitments.iter().any(|commitment| {
commitment == &G1Affine::identity()
|| !commitment.is_on_curve()
|| !commitment.is_in_correct_subgroup_assuming_on_curve()
}) {
return Err(KzgError::NotOnCurveError(
"commitment not on curve".to_owned(),
));
}
    // Validate that every proof is a valid, non-identity point on G1,
    // using the same checks as for the commitments
if proofs.iter().any(|proof| {
proof == &G1Affine::identity()
|| !proof.is_on_curve()
|| !proof.is_in_correct_subgroup_assuming_on_curve()
}) {
return Err(KzgError::NotOnCurveError("proof not on curve".to_owned()));
}
// Compute evaluation challenges and evaluate polynomials at those points
// This step:
// 1. Generates random evaluation points for each blob
// 2. Evaluates each blob's polynomial at its corresponding point
let (evaluation_challenges, ys) =
helpers::compute_challenges_and_evaluate_polynomial(blobs, commitments)?;
    // Record the number of field elements in each blob's polynomial evaluation form;
    // these lengths are folded into the Fiat-Shamir challenge transcript below
let blobs_as_field_elements_length: Vec<u64> = blobs
.iter()
.map(|blob| blob.to_polynomial_eval_form().evaluations().len() as u64)
.collect();
// Perform the actual batch verification using the computed values:
// - commitments: Original KZG commitments
// - evaluation_challenges: Points where polynomials are evaluated
// - ys: Values of polynomials at evaluation points
// - proofs: KZG proofs for each evaluation
// - blobs_as_field_elements_length: Length of each blob's polynomial
verify_kzg_proof_batch(
commitments,
&evaluation_challenges,
&ys,
proofs,
&blobs_as_field_elements_length,
)
}
/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch
/// Builds the Fiat-Shamir transcript that is hashed and fed into `helpers::compute_powers`.
/// This mirrors the following reference code from the EIP-4844 spec:
/// `# Append all inputs to the transcript before we hash
/// for commitment, z, y, proof in zip(commitments, zs, ys, proofs):
///     data += commitment + bls_field_to_bytes(z) + bls_field_to_bytes(y) + proof`
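///
/// Concretely, each of the `n` openings contributes `commitment || z || y || proof` to the
/// transcript, with every slot `BYTES_PER_FIELD_ELEMENT` bytes wide (compressed G1 points and
/// big-endian field elements), after the domain separator, the per-blob field-element counts,
/// and the opening count `n`.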
fn compute_r_powers(
commitments: &[G1Affine],
zs: &[Fr],
ys: &[Fr],
proofs: &[G1Affine],
blobs_as_field_elements_length: &[u64],
) -> Result<Vec<Fr>, KzgError> {
// Get the number of commitments/proofs we're processing
let n = commitments.len();
// Initial data length includes:
// - 24 bytes for domain separator
// - 8 bytes for number of field elements per blob
// - 8 bytes for number of commitments
let mut initial_data_length: usize = 40;
    // Calculate the total input size:
    // - initial_data_length (40 bytes)
    // - for each of the n openings (commitments/zs/ys/proofs/blob lengths all have length n):
    //   * BYTES_PER_FIELD_ELEMENT for the commitment
    //   * 2 * BYTES_PER_FIELD_ELEMENT for the z and y values
    //   * BYTES_PER_FIELD_ELEMENT for the proof
    //   * 8 bytes for the blob length
let input_size = initial_data_length
+ n * (BYTES_PER_FIELD_ELEMENT + 2 * BYTES_PER_FIELD_ELEMENT + BYTES_PER_FIELD_ELEMENT + 8);
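    // (i.e., 40 + 136 * n bytes, given 32-byte field elements)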
// Initialize buffer for data to be hashed
let mut data_to_be_hashed: Vec<u8> = vec![0; input_size];
// Copy domain separator to start of buffer
// This provides domain separation for the hash function
data_to_be_hashed[0..24].copy_from_slice(RANDOM_CHALLENGE_KZG_BATCH_DOMAIN);
    // Convert the number of openings `n` to 8 big-endian bytes and copy into the buffer
let n_bytes: [u8; 8] = n.to_be_bytes();
data_to_be_hashed[32..40].copy_from_slice(&n_bytes);
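    // Copy each blob's field-element count (8 big-endian bytes each) into the length region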
let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)];
for (chunk, &length) in target_slice
.chunks_mut(8)
.zip(blobs_as_field_elements_length)
{
chunk.copy_from_slice(&length.to_be_bytes());
}
initial_data_length += n * 8;
// Process each commitment, proof, and evaluation point/value
for i in 0..n {
// Serialize and copy commitment
let mut v = vec![];
// TODO(anupsv): Move serialization to helper function. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/32
commitments[i].serialize_compressed(&mut v).map_err(|_| {
KzgError::SerializationError("Failed to serialize commitment".to_string())
})?;
data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)]
.copy_from_slice(&v[..]);
initial_data_length += BYTES_PER_FIELD_ELEMENT;
// Convert z point to bytes and copy
let v = zs[i].into_bigint().to_bytes_be();
data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)]
.copy_from_slice(&v[..]);
initial_data_length += BYTES_PER_FIELD_ELEMENT;
// Convert y value to bytes and copy
let v = ys[i].into_bigint().to_bytes_be();
data_to_be_hashed[initial_data_length..(v.len() + initial_data_length)]
.copy_from_slice(&v[..]);
initial_data_length += BYTES_PER_FIELD_ELEMENT;
// Serialize and copy proof
let mut proof_bytes = vec![];
proofs[i]
.serialize_compressed(&mut proof_bytes)
.map_err(|_| KzgError::SerializationError("Failed to serialize proof".to_string()))?;
data_to_be_hashed[initial_data_length..(proof_bytes.len() + initial_data_length)]
.copy_from_slice(&proof_bytes[..]);
initial_data_length += BYTES_PER_FIELD_ELEMENT;
}
    // Sanity check: the offsets above must have filled the buffer exactly;
    // this guards against bookkeeping errors in the layout
if initial_data_length != input_size {
return Err(KzgError::InvalidInputLength);
}
// Hash all the data to get our random challenge
let r = helpers::hash_to_field_element(&data_to_be_hashed);
// Compute powers of the random challenge: [r^0, r^1, r^2, ..., r^(n-1)]
Ok(helpers::compute_powers(&r, n))
}
/// Verifies multiple KZG proofs efficiently.
/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#verify_kzg_proof_batch
/// # Arguments
///
/// * `commitments` - A slice of `G1Affine` commitments.
/// * `zs` - A slice of `Fr` elements representing the evaluation points.
/// * `ys` - A slice of `Fr` elements representing the claimed evaluation values.
/// * `proofs` - A slice of `G1Affine` proofs.
/// * `blobs_as_field_elements_length` - The number of field elements in each blob,
///   folded into the Fiat-Shamir challenge transcript.
///
/// # Returns
///
/// * `Ok(true)` if all proofs are valid.
/// * `Ok(false)` if any proof is invalid.
/// * `Err(KzgError)` if an error occurs during verification.
///
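/// # Note
///
/// All `n` openings are folded with powers of a Fiat-Shamir challenge `r` into a single
/// pairing check:
/// `e(Σ r^i * proof_i, [τ]_2) == e(Σ r^i * (C_i - [y_i]_1) + Σ r^i * z_i * proof_i, [1]_2)`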
fn verify_kzg_proof_batch(
commitments: &[G1Affine],
zs: &[Fr],
ys: &[Fr],
proofs: &[G1Affine],
blobs_as_field_elements_length: &[u64],
) -> Result<bool, KzgError> {
// Verify that all input arrays have the same length
// This is crucial for batch verification to work correctly
if !(commitments.len() == zs.len() && zs.len() == ys.len() && ys.len() == proofs.len()) {
return Err(KzgError::GenericError(
"length's of the input are not the same".to_owned(),
));
}
// Check that all commitments are valid points on the G1 curve
// This prevents invalid curve attacks
if !commitments
.iter()
.all(|commitment| is_on_curve_g1(&G1Projective::from(*commitment)))
{
return Err(KzgError::NotOnCurveError(
"commitment not on curve".to_owned(),
));
}
// Check that all proofs are valid points on the G1 curve
if !proofs
.iter()
.all(|proof| is_on_curve_g1(&G1Projective::from(*proof)))
{
return Err(KzgError::NotOnCurveError("proof".to_owned()));
}
// Verify that the trusted setup point τ*G2 is on the G2 curve
if !helpers::is_on_curve_g2(&G2Projective::from(G2_TAU)) {
return Err(KzgError::NotOnCurveError("g2 tau".to_owned()));
}
let n = commitments.len();
    // Initialize vectors to store:
    // c_minus_y: [C_i - [y_i]] (commitment minus the claimed evaluation lifted to G1)
    // r_times_z: [r^i * z_i] (powers of the random challenge times the evaluation points)
let mut c_minus_y: Vec<G1Affine> = Vec::with_capacity(n);
let mut r_times_z: Vec<Fr> = Vec::with_capacity(n);
// Compute powers of the random challenge: [r^0, r^1, r^2, ..., r^(n-1)]
let r_powers = compute_r_powers(commitments, zs, ys, proofs, blobs_as_field_elements_length)?;
// Compute Σ(r^i * proof_i)
let proof_lincomb = helpers::g1_lincomb(proofs, &r_powers)?;
// For each proof i:
// 1. Compute C_i - [y_i]
// 2. Compute r^i * z_i
    for i in 0..n {
        // Lift y_i to a point on G1: [y_i] = y_i * G1
        let y_in_g1 = G1Affine::generator() * ys[i];
        // Compute C_i - [y_i] and convert to affine coordinates
        c_minus_y.push((commitments[i] - y_in_g1).into_affine());
        // Compute r^i * z_i
        r_times_z.push(r_powers[i] * zs[i]);
}
// Compute:
// proof_z_lincomb = Σ(r^i * z_i * proof_i)
// c_minus_y_lincomb = Σ(r^i * (C_i - [y_i]))
let proof_z_lincomb = helpers::g1_lincomb(proofs, &r_times_z)?;
let c_minus_y_lincomb = helpers::g1_lincomb(&c_minus_y, &r_powers)?;
// Compute right-hand side of the pairing equation
let rhs_g1 = c_minus_y_lincomb + proof_z_lincomb;
// Verify the pairing equation:
// e(Σ(r^i * proof_i), [τ]) = e(Σ(r^i * (C_i - [y_i])) + Σ(r^i * z_i * proof_i), [1])
let result =
helpers::pairings_verify(proof_lincomb, G2_TAU, rhs_g1.into(), G2Affine::generator());
Ok(result)
}
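
// Illustrative sketch only: the test below demonstrates the pairing identity that
// `verify_kzg_proof_batch` folds its openings into, using a locally chosen `tau` as a
// stand-in for the trusted setup (the secret behind the real `G2_TAU` is unknown, so valid
// proofs against it cannot be fabricated here). It exercises only arkworks primitives, not
// the private functions above.
#[cfg(test)]
mod pairing_identity_sketch {
    use ark_bn254::{Bn254, Fr, G1Affine, G2Affine};
    use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup};

    #[test]
    fn single_opening_matches_the_batch_equation_with_n_equal_one() {
        // Toy degree-1 polynomial p(x) = c0 + c1 * x and a locally chosen secret tau.
        let (c0, c1) = (Fr::from(7u64), Fr::from(11u64));
        let tau = Fr::from(1_234_567u64);

        let g1 = G1Affine::generator();
        let g2 = G2Affine::generator();

        // Commitment C = p(tau) * G1, opening at z with value y = p(z),
        // quotient q(x) = (p(x) - y) / (x - z) = c1, so proof = q(tau) * G1 = c1 * G1.
        let z = Fr::from(42u64);
        let y = c0 + c1 * z;
        let commitment = g1 * (c0 + c1 * tau);
        let proof = g1 * c1;

        // n = 1 case of the batch equation (r^0 = 1):
        // e(proof, tau * G2) == e((C - y * G1) + z * proof, G2)
        let lhs = Bn254::pairing(proof.into_affine(), (g2 * tau).into_affine());
        let rhs = Bn254::pairing((commitment - g1 * y + proof * z).into_affine(), g2);
        assert_eq!(lhs, rhs);
    }
}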