#![cfg_attr(not(feature = "std"), no_std)]
#![warn(
unused,
future_incompatible,
nonstandard_style,
rust_2018_idioms,
rust_2021_compatibility
)]
#![forbid(unsafe_code)]
#![doc = include_str!("../README.md")]
mod error;
mod flags;
mod impls;
pub use ark_std::io::{Read, Write};
pub use error::*;
pub use flags::*;
#[cfg(test)]
mod test;
#[cfg(feature = "derive")]
#[doc(hidden)]
pub use ark_serialize_derive::*;
use digest::{generic_array::GenericArray, Digest, OutputSizeUser};
/// Serializes each argument, in order and in uncompressed form, into a single
/// freshly allocated byte vector, producing
/// `Result<Vec<u8>, SerializationError>`.
#[macro_export]
macro_rules! serialize_to_vec {
    // Entry point: allocate the buffer, then recurse over the argument list,
    // returning the buffer only if every serialization succeeded.
    ($($x:expr),*) => ({
        let mut buf = ::ark_std::vec![];
        {$crate::serialize_to_vec!(@inner buf, $($x),*)}.map(|_| buf)
    });
    // Recursive case: serialize the head element, then (via `.and`) the rest.
    // `.and` short-circuits, so serialization stops at the first error.
    (@inner $buf:expr, $y:expr, $($x:expr),*) => ({
        {
            $crate::CanonicalSerialize::serialize_uncompressed(&$y, &mut $buf)
        }.and({$crate::serialize_to_vec!(@inner $buf, $($x),*)})
    });
    // Base case: exactly one element remaining.
    (@inner $buf:expr, $x:expr) => ({
        $crate::CanonicalSerialize::serialize_uncompressed(&$x, &mut $buf)
    });
}
/// Flag passed to [`CanonicalSerialize::serialize_with_mode`] selecting whether
/// the compressed (`Yes`) or uncompressed (`No`) encoding is written.
// `Debug` added so the flag can appear in diagnostics/assertion messages;
// purely additive, so callers are unaffected.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Compress {
    Yes,
    No,
}
/// Flag passed to [`CanonicalDeserialize::deserialize_with_mode`] selecting
/// whether [`Valid::check`] is run on the deserialized value (`Yes`) or
/// skipped (`No`).
// `Debug` added so the flag can appear in diagnostics/assertion messages;
// purely additive, so callers are unaffected.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Validate {
    Yes,
    No,
}
/// Types carrying a semantic validity check that deserialization can invoke
/// (see [`Validate`]). `Sync` is required so batches can be checked from
/// multiple threads when the `parallel` feature is enabled.
pub trait Valid: Sized + Sync {
    /// Returns `Ok(())` if `self` is a valid instance, and an error otherwise.
    fn check(&self) -> Result<(), SerializationError>;

    /// Checks every element yielded by `batch`, stopping early on error.
    ///
    /// With the `parallel` feature the elements are checked on rayon's thread
    /// pool via `par_bridge` (no particular order, so "first" error is not
    /// deterministic); otherwise they are checked sequentially in iteration
    /// order.
    fn batch_check<'a>(
        batch: impl Iterator<Item = &'a Self> + Send,
    ) -> Result<(), SerializationError>
    where
        Self: 'a,
    {
        #[cfg(feature = "parallel")]
        {
            use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
            batch.par_bridge().try_for_each(|e| e.check())?;
        }
        #[cfg(not(feature = "parallel"))]
        {
            for item in batch {
                item.check()?;
            }
        }
        Ok(())
    }
}
/// Canonical serialization to a [`Write`] target.
///
/// Implementors provide [`serialize_with_mode`](Self::serialize_with_mode) and
/// [`serialized_size`](Self::serialized_size); the remaining methods are
/// convenience wrappers that fix the [`Compress`] mode.
pub trait CanonicalSerialize {
    /// Serializes `self` into `writer` using the encoding selected by
    /// `compress`.
    fn serialize_with_mode<W: Write>(
        &self,
        writer: W,
        compress: Compress,
    ) -> Result<(), SerializationError>;

    /// Number of bytes that `serialize_with_mode` writes for the given mode.
    fn serialized_size(&self, compress: Compress) -> usize;

    /// Shorthand for `serialize_with_mode(writer, Compress::Yes)`.
    fn serialize_compressed<W: Write>(&self, writer: W) -> Result<(), SerializationError> {
        self.serialize_with_mode(writer, Compress::Yes)
    }

    /// Shorthand for `serialized_size(Compress::Yes)`.
    fn compressed_size(&self) -> usize {
        self.serialized_size(Compress::Yes)
    }

    /// Shorthand for `serialize_with_mode(writer, Compress::No)`.
    fn serialize_uncompressed<W: Write>(&self, writer: W) -> Result<(), SerializationError> {
        self.serialize_with_mode(writer, Compress::No)
    }

    /// Shorthand for `serialized_size(Compress::No)`.
    fn uncompressed_size(&self) -> usize {
        self.serialized_size(Compress::No)
    }
}
/// Canonical deserialization from a [`Read`] source; the inverse of
/// [`CanonicalSerialize`].
///
/// Implementors provide [`deserialize_with_mode`](Self::deserialize_with_mode);
/// the remaining methods are convenience wrappers fixing the [`Compress`] and
/// [`Validate`] modes. The `Valid` supertrait supplies the check run when
/// `Validate::Yes` is requested.
pub trait CanonicalDeserialize: Valid {
    /// Reads `Self` from `reader` using the encoding selected by `compress`,
    /// optionally running validity checks as selected by `validate`.
    fn deserialize_with_mode<R: Read>(
        reader: R,
        compress: Compress,
        validate: Validate,
    ) -> Result<Self, SerializationError>;

    /// Compressed encoding, with validation.
    fn deserialize_compressed<R: Read>(reader: R) -> Result<Self, SerializationError> {
        Self::deserialize_with_mode(reader, Compress::Yes, Validate::Yes)
    }

    /// Compressed encoding, skipping validation.
    fn deserialize_compressed_unchecked<R: Read>(reader: R) -> Result<Self, SerializationError> {
        Self::deserialize_with_mode(reader, Compress::Yes, Validate::No)
    }

    /// Uncompressed encoding, with validation.
    fn deserialize_uncompressed<R: Read>(reader: R) -> Result<Self, SerializationError> {
        Self::deserialize_with_mode(reader, Compress::No, Validate::Yes)
    }

    /// Uncompressed encoding, skipping validation.
    fn deserialize_uncompressed_unchecked<R: Read>(reader: R) -> Result<Self, SerializationError> {
        Self::deserialize_with_mode(reader, Compress::No, Validate::No)
    }
}
/// Serialization that additionally embeds a set of [`Flags`] alongside the
/// value's encoding.
pub trait CanonicalSerializeWithFlags: CanonicalSerialize {
    /// Serializes `self` together with `flags` into `writer`.
    fn serialize_with_flags<W: Write, F: Flags>(
        &self,
        writer: W,
        flags: F,
    ) -> Result<(), SerializationError>;

    /// Number of bytes `serialize_with_flags` writes when using flag type `F`.
    fn serialized_size_with_flags<F: Flags>(&self) -> usize;
}
/// Deserialization counterpart of [`CanonicalSerializeWithFlags`]: recovers
/// both the value and the embedded [`Flags`].
pub trait CanonicalDeserializeWithFlags: Sized {
    /// Reads `Self` and its accompanying flags of type `F` from `reader`.
    fn deserialize_with_flags<R: Read, F: Flags>(
        reader: R,
    ) -> Result<(Self, F), SerializationError>;
}
/// Private [`Write`] adapter that feeds every written byte into the wrapped
/// [`Digest`] hasher, letting `CanonicalSerialize` output be hashed without an
/// intermediate buffer.
struct HashMarshaller<'a, H: Digest>(&'a mut H);
impl<H: Digest> ark_std::io::Write for HashMarshaller<'_, H> {
    /// Absorbs `buf` into the wrapped hasher; always reports the full length
    /// as written and never fails.
    #[inline]
    fn write(&mut self, buf: &[u8]) -> ark_std::io::Result<usize> {
        self.0.update(buf);
        Ok(buf.len())
    }

    /// Nothing is buffered, so flushing is a no-op that always succeeds.
    #[inline]
    fn flush(&mut self) -> ark_std::io::Result<()> {
        Ok(())
    }
}
/// Extension trait hashing a value's canonical serialization with any
/// [`Digest`] hasher `H`, streaming the bytes straight into the hasher via
/// [`HashMarshaller`] (no intermediate buffer).
///
/// Blanket-implemented for every [`CanonicalSerialize`] type.
pub trait CanonicalSerializeHashExt: CanonicalSerialize {
    /// Hash of the *compressed* serialization of `self` under `H`.
    fn hash<H: Digest>(&self) -> GenericArray<u8, <H as OutputSizeUser>::OutputSize> {
        let mut hasher = H::new();
        // HashMarshaller's `write`/`flush` never fail, so any error here would
        // come from the serializer itself violating its size contract.
        // (Fixed typo: "infaillible" -> "infallible".)
        self.serialize_compressed(HashMarshaller(&mut hasher))
            .expect("HashMarshaller::flush should be infallible!");
        hasher.finalize()
    }

    /// Hash of the *uncompressed* serialization of `self` under `H`.
    fn hash_uncompressed<H: Digest>(&self) -> GenericArray<u8, <H as OutputSizeUser>::OutputSize> {
        let mut hasher = H::new();
        self.serialize_uncompressed(HashMarshaller(&mut hasher))
            .expect("HashMarshaller::flush should be infallible!");
        hasher.finalize()
    }
}
// Blanket implementation: every `CanonicalSerialize` type gets the hashing
// helpers for free.
impl<T: CanonicalSerialize> CanonicalSerializeHashExt for T {}
/// Returns `(bit_size, byte_size)` for a modulus of `modulus_bits` bits, where
/// `byte_size = ceil(modulus_bits / 8)` and `bit_size = 8 * byte_size` (i.e.
/// the bit width rounded up to a whole number of bytes).
#[inline]
pub fn buffer_bit_byte_size(modulus_bits: usize) -> (usize, usize) {
    let whole_bytes = (modulus_bits + 7) / 8;
    (8 * whole_bytes, whole_bytes)
}
/// Number of bytes needed to store `modulus_bits` bits, i.e.
/// `ceil(modulus_bits / 8)`.
///
/// Written as quotient-plus-carry rather than `(modulus_bits + 7) / 8` so the
/// computation cannot overflow for `modulus_bits` near `usize::MAX` (the
/// pre-add form panics in debug builds / wraps in release there). Still a
/// `const fn`.
#[inline]
pub const fn buffer_byte_size(modulus_bits: usize) -> usize {
    modulus_bits / 8 + (modulus_bits % 8 != 0) as usize
}