#![allow(dead_code)]
#![allow(unused_imports)]
use crate::gen::Schema::*;
use crate::gen::SparseTensor::*;
use crate::gen::Tensor::*;
use flatbuffers::EndianScalar;
use std::{cmp::Ordering, mem};
// Legacy module-level aliases for `CompressionType`'s range and values.
// Retained only for backward compatibility; prefer the associated constants
// (`CompressionType::ENUM_MIN` / `ENUM_MAX` / `ENUM_VALUES`).
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_COMPRESSION_TYPE: i8 = 0;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_COMPRESSION_TYPE: i8 = 1;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPRESSION_TYPE: [CompressionType; 2] =
[CompressionType::LZ4_FRAME, CompressionType::ZSTD];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompressionType(pub i8);
#[allow(non_upper_case_globals)]
impl CompressionType {
pub const LZ4_FRAME: Self = Self(0);
pub const ZSTD: Self = Self(1);
pub const ENUM_MIN: i8 = 0;
pub const ENUM_MAX: i8 = 1;
pub const ENUM_VALUES: &'static [Self] = &[Self::LZ4_FRAME, Self::ZSTD];
pub fn variant_name(self) -> Option<&'static str> {
match self {
Self::LZ4_FRAME => Some("LZ4_FRAME"),
Self::ZSTD => Some("ZSTD"),
_ => None,
}
}
}
// Debug renders the symbolic variant name; unknown raw values (e.g. written
// by a newer schema revision) are shown as "<UNKNOWN n>" instead of panicking.
impl core::fmt::Debug for CompressionType {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
// Deserialization: read the one-byte discriminant directly from the buffer.
impl<'a> flatbuffers::Follow<'a> for CompressionType {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must be in bounds of `buf` for an i8 read.
        let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
        Self(b)
    }
}
// Serialization: write the enum as its single-byte discriminant.
impl flatbuffers::Push for CompressionType {
    type Output = CompressionType;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<i8>(dst, self.0);
    }
}
// Endianness conversion is a no-op for a one-byte value, but the trait is
// required so the enum can be stored as a flatbuffers scalar.
impl flatbuffers::EndianScalar for CompressionType {
    type Scalar = i8;
    #[inline]
    fn to_little_endian(self) -> i8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i8) -> Self {
        let b = i8::from_le(v);
        Self(b)
    }
}
// Open enum: every i8 bit pattern is representable, so verification reduces
// to bounds-checking the underlying scalar.
impl<'a> flatbuffers::Verifiable for CompressionType {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i8::run_verifier(v, pos)
    }
}
// Fixed-size scalar wrapper: a vector of these is verified by range check alone.
impl flatbuffers::SimpleToVerifyInSlice for CompressionType {}
// Legacy module-level aliases for `BodyCompressionMethod`'s range and values.
// Retained only for backward compatibility; prefer the associated constants.
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_BODY_COMPRESSION_METHOD: i8 = 0;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_BODY_COMPRESSION_METHOD: i8 = 0;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_BODY_COMPRESSION_METHOD: [BodyCompressionMethod; 1] =
[BodyCompressionMethod::BUFFER];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct BodyCompressionMethod(pub i8);
#[allow(non_upper_case_globals)]
impl BodyCompressionMethod {
pub const BUFFER: Self = Self(0);
pub const ENUM_MIN: i8 = 0;
pub const ENUM_MAX: i8 = 0;
pub const ENUM_VALUES: &'static [Self] = &[Self::BUFFER];
pub fn variant_name(self) -> Option<&'static str> {
match self {
Self::BUFFER => Some("BUFFER"),
_ => None,
}
}
}
// Debug renders the symbolic variant name; unknown raw values are shown as
// "<UNKNOWN n>" instead of panicking.
impl core::fmt::Debug for BodyCompressionMethod {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
// Deserialization: read the one-byte discriminant directly from the buffer.
impl<'a> flatbuffers::Follow<'a> for BodyCompressionMethod {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must be in bounds of `buf` for an i8 read.
        let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
        Self(b)
    }
}
// Serialization: write the enum as its single-byte discriminant.
impl flatbuffers::Push for BodyCompressionMethod {
    type Output = BodyCompressionMethod;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<i8>(dst, self.0);
    }
}
// Endianness conversion is a no-op for a one-byte value; required by the trait.
impl flatbuffers::EndianScalar for BodyCompressionMethod {
    type Scalar = i8;
    #[inline]
    fn to_little_endian(self) -> i8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i8) -> Self {
        let b = i8::from_le(v);
        Self(b)
    }
}
// Open enum: verification reduces to bounds-checking the underlying scalar.
impl<'a> flatbuffers::Verifiable for BodyCompressionMethod {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i8::run_verifier(v, pos)
    }
}
// Fixed-size scalar wrapper: a vector of these is verified by range check alone.
impl flatbuffers::SimpleToVerifyInSlice for BodyCompressionMethod {}
// Legacy module-level aliases for `MessageHeader`'s range and values.
// Retained only for backward compatibility; prefer the associated constants.
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_MESSAGE_HEADER: u8 = 0;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_MESSAGE_HEADER: u8 = 5;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_MESSAGE_HEADER: [MessageHeader; 6] = [
MessageHeader::NONE,
MessageHeader::Schema,
MessageHeader::DictionaryBatch,
MessageHeader::RecordBatch,
MessageHeader::Tensor,
MessageHeader::SparseTensor,
];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct MessageHeader(pub u8);
#[allow(non_upper_case_globals)]
impl MessageHeader {
pub const NONE: Self = Self(0);
pub const Schema: Self = Self(1);
pub const DictionaryBatch: Self = Self(2);
pub const RecordBatch: Self = Self(3);
pub const Tensor: Self = Self(4);
pub const SparseTensor: Self = Self(5);
pub const ENUM_MIN: u8 = 0;
pub const ENUM_MAX: u8 = 5;
pub const ENUM_VALUES: &'static [Self] = &[
Self::NONE,
Self::Schema,
Self::DictionaryBatch,
Self::RecordBatch,
Self::Tensor,
Self::SparseTensor,
];
pub fn variant_name(self) -> Option<&'static str> {
match self {
Self::NONE => Some("NONE"),
Self::Schema => Some("Schema"),
Self::DictionaryBatch => Some("DictionaryBatch"),
Self::RecordBatch => Some("RecordBatch"),
Self::Tensor => Some("Tensor"),
Self::SparseTensor => Some("SparseTensor"),
_ => None,
}
}
}
// Debug renders the symbolic variant name; unknown raw values are shown as
// "<UNKNOWN n>" instead of panicking.
impl core::fmt::Debug for MessageHeader {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
// Deserialization: read the one-byte union discriminant from the buffer.
impl<'a> flatbuffers::Follow<'a> for MessageHeader {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must be in bounds of `buf` for a u8 read.
        let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
        Self(b)
    }
}
// Serialization: write the discriminant as a single byte.
impl flatbuffers::Push for MessageHeader {
    type Output = MessageHeader;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<u8>(dst, self.0);
    }
}
// Endianness conversion is a no-op for a one-byte value; required by the trait.
impl flatbuffers::EndianScalar for MessageHeader {
    type Scalar = u8;
    #[inline]
    fn to_little_endian(self) -> u8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: u8) -> Self {
        let b = u8::from_le(v);
        Self(b)
    }
}
// Open enum: verification reduces to bounds-checking the underlying scalar.
// (The discriminant/value pairing itself is checked in Message's verifier.)
impl<'a> flatbuffers::Verifiable for MessageHeader {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        u8::run_verifier(v, pos)
    }
}
// Fixed-size scalar wrapper: a vector of these is verified by range check alone.
impl flatbuffers::SimpleToVerifyInSlice for MessageHeader {}
/// Marker type naming the offset kind for a `MessageHeader` union value; never instantiated.
pub struct MessageHeaderUnionTableOffset {}
/// FieldNode: per-field metadata inside a RecordBatch.
///
/// A fixed-size 16-byte inline FlatBuffers struct holding two little-endian
/// `i64`s: `length` at bytes 0..8 and `null_count` at bytes 8..16. Stored as
/// raw bytes so it can be memcpy'd to/from buffers regardless of host
/// endianness.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct FieldNode(pub [u8; 16]);
impl Default for FieldNode {
    // All-zero bytes, i.e. length == 0 and null_count == 0.
    fn default() -> Self {
        Self([0; 16])
    }
}
impl core::fmt::Debug for FieldNode {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        f.debug_struct("FieldNode")
            .field("length", &self.length())
            .field("null_count", &self.null_count())
            .finish()
    }
}
// Fixed-size POD struct: a vector of FieldNode is verified by bounds check alone.
impl flatbuffers::SimpleToVerifyInSlice for FieldNode {}
// Following a FieldNode by value delegates to the by-reference impl below.
impl<'a> flatbuffers::Follow<'a> for FieldNode {
    type Inner = &'a FieldNode;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        <&'a FieldNode>::follow(buf, loc)
    }
}
impl<'a> flatbuffers::Follow<'a> for &'a FieldNode {
    type Inner = &'a FieldNode;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc..loc+16` must be in bounds of `buf`;
        // the cast is sound because FieldNode is repr(transparent) over [u8; 16].
        flatbuffers::follow_cast_ref::<FieldNode>(buf, loc)
    }
}
impl<'b> flatbuffers::Push for FieldNode {
    type Output = FieldNode;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        // Raw byte copy: the struct's bytes are already in wire (little-endian)
        // order, so serialization is a straight memcpy of the 16 bytes.
        let src =
            ::core::slice::from_raw_parts(self as *const FieldNode as *const u8, Self::size());
        dst.copy_from_slice(src);
    }
}
impl<'a> flatbuffers::Verifiable for FieldNode {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        // Inline struct: any 16 in-bounds bytes are valid, so only range-check.
        v.in_buffer::<Self>(pos)
    }
}
impl<'a> FieldNode {
    /// Builds a FieldNode from host-order values, storing both fields
    /// little-endian into the 16-byte backing array.
    #[allow(clippy::too_many_arguments)]
    pub fn new(length: i64, null_count: i64) -> Self {
        let mut s = Self([0; 16]);
        s.set_length(length);
        s.set_null_count(null_count);
        s
    }
    /// Number of value slots in the field (bytes 0..8, little-endian).
    pub fn length(&self) -> i64 {
        let mut mem = core::mem::MaybeUninit::<<i64 as EndianScalar>::Scalar>::uninit();
        // SAFETY: copies exactly size_of::<i64>() bytes from within the 16-byte
        // array into `mem`, fully initializing it before assume_init.
        EndianScalar::from_little_endian(unsafe {
            core::ptr::copy_nonoverlapping(
                self.0[0..].as_ptr(),
                mem.as_mut_ptr() as *mut u8,
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
            mem.assume_init()
        })
    }
    /// Stores `length` little-endian into bytes 0..8.
    pub fn set_length(&mut self, x: i64) {
        let x_le = x.to_little_endian();
        // SAFETY: writes size_of::<i64>() bytes into the array starting at 0,
        // which is in bounds of the 16-byte backing store.
        unsafe {
            core::ptr::copy_nonoverlapping(
                &x_le as *const _ as *const u8,
                self.0[0..].as_mut_ptr(),
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
        }
    }
    /// Number of null slots in the field (bytes 8..16, little-endian).
    pub fn null_count(&self) -> i64 {
        let mut mem = core::mem::MaybeUninit::<<i64 as EndianScalar>::Scalar>::uninit();
        // SAFETY: copies exactly size_of::<i64>() bytes starting at offset 8,
        // in bounds of the 16-byte array; `mem` is fully initialized.
        EndianScalar::from_little_endian(unsafe {
            core::ptr::copy_nonoverlapping(
                self.0[8..].as_ptr(),
                mem.as_mut_ptr() as *mut u8,
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
            mem.assume_init()
        })
    }
    /// Stores `null_count` little-endian into bytes 8..16.
    pub fn set_null_count(&mut self, x: i64) {
        let x_le = x.to_little_endian();
        // SAFETY: writes size_of::<i64>() bytes starting at offset 8, in bounds.
        unsafe {
            core::ptr::copy_nonoverlapping(
                &x_le as *const _ as *const u8,
                self.0[8..].as_mut_ptr(),
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
        }
    }
}
/// Marker type naming offsets to `BodyCompression` tables; never instantiated.
pub enum BodyCompressionOffset {}
/// Read-only view over a `BodyCompression` FlatBuffers table: describes the
/// codec and granularity used to compress a record batch's body buffers.
#[derive(Copy, Clone, PartialEq)]
pub struct BodyCompression<'a> {
    pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for BodyCompression<'a> {
    type Inner = BodyCompression<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must point at a valid table in `buf`.
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
impl<'a> BodyCompression<'a> {
    // Vtable slot offsets generated from the schema field ids.
    pub const VT_CODEC: flatbuffers::VOffsetT = 4;
    pub const VT_METHOD: flatbuffers::VOffsetT = 6;
    /// Wraps an already-located table. Caller must ensure `table` actually
    /// points at a BodyCompression table (normally via `follow`/verification).
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        BodyCompression { _tab: table }
    }
    /// Serializes a BodyCompression table from `args` into `_fbb`.
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args BodyCompressionArgs,
    ) -> flatbuffers::WIPOffset<BodyCompression<'bldr>> {
        let mut builder = BodyCompressionBuilder::new(_fbb);
        builder.add_method(args.method);
        builder.add_codec(args.codec);
        builder.finish()
    }
    /// Compression codec; defaults to `LZ4_FRAME` when the field is absent.
    #[inline]
    pub fn codec(&self) -> CompressionType {
        // unwrap is safe: a Some default is supplied, so get never returns None.
        unsafe {
            self._tab
                .get::<CompressionType>(BodyCompression::VT_CODEC, Some(CompressionType::LZ4_FRAME))
                .unwrap()
        }
    }
    /// Compression granularity; defaults to `BUFFER` when the field is absent.
    #[inline]
    pub fn method(&self) -> BodyCompressionMethod {
        unsafe {
            self._tab
                .get::<BodyCompressionMethod>(
                    BodyCompression::VT_METHOD,
                    Some(BodyCompressionMethod::BUFFER),
                )
                .unwrap()
        }
    }
}
// Structural verification: both fields are optional scalars (the `false`
// arguments), so an empty table is valid.
impl flatbuffers::Verifiable for BodyCompression<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<CompressionType>("codec", Self::VT_CODEC, false)?
            .visit_field::<BodyCompressionMethod>("method", Self::VT_METHOD, false)?
            .finish();
        Ok(())
    }
}
/// User-supplied arguments for [`BodyCompression::create`].
pub struct BodyCompressionArgs {
    /// Codec to record; schema default is `LZ4_FRAME`.
    pub codec: CompressionType,
    /// Granularity to record; schema default is `BUFFER`.
    pub method: BodyCompressionMethod,
}
// Fix: the impl previously declared `impl<'a>` although BodyCompressionArgs
// has no lifetime parameter, leaving `'a` unused and unconstrained
// (clippy::extra_unused_lifetimes). The lifetime is simply dropped.
impl Default for BodyCompressionArgs {
    #[inline]
    fn default() -> Self {
        // Defaults mirror the schema defaults so omitted fields round-trip.
        BodyCompressionArgs {
            codec: CompressionType::LZ4_FRAME,
            method: BodyCompressionMethod::BUFFER,
        }
    }
}
/// Incremental builder for a `BodyCompression` table.
pub struct BodyCompressionBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> BodyCompressionBuilder<'a, 'b> {
    /// Writes the codec slot; values equal to the default (LZ4_FRAME) are elided.
    #[inline]
    pub fn add_codec(&mut self, codec: CompressionType) {
        self.fbb_.push_slot::<CompressionType>(
            BodyCompression::VT_CODEC,
            codec,
            CompressionType::LZ4_FRAME,
        );
    }
    /// Writes the method slot; values equal to the default (BUFFER) are elided.
    #[inline]
    pub fn add_method(&mut self, method: BodyCompressionMethod) {
        self.fbb_.push_slot::<BodyCompressionMethod>(
            BodyCompression::VT_METHOD,
            method,
            BodyCompressionMethod::BUFFER,
        );
    }
    /// Starts a new table in `_fbb`; must be paired with `finish`.
    #[inline]
    pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> BodyCompressionBuilder<'a, 'b> {
        let start = _fbb.start_table();
        BodyCompressionBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and returns its offset for embedding in a parent.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<BodyCompression<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}
impl core::fmt::Debug for BodyCompression<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("BodyCompression");
        ds.field("codec", &self.codec());
        ds.field("method", &self.method());
        ds.finish()
    }
}
/// Marker type naming offsets to `RecordBatch` tables; never instantiated.
pub enum RecordBatchOffset {}
/// Read-only view over a `RecordBatch` FlatBuffers table: metadata describing
/// a batch of equal-length arrays (row count, per-field nodes, buffer layout,
/// optional compression).
#[derive(Copy, Clone, PartialEq)]
pub struct RecordBatch<'a> {
    pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for RecordBatch<'a> {
    type Inner = RecordBatch<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must point at a valid table in `buf`.
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
impl<'a> RecordBatch<'a> {
    // Vtable slot offsets generated from the schema field ids.
    pub const VT_LENGTH: flatbuffers::VOffsetT = 4;
    pub const VT_NODES: flatbuffers::VOffsetT = 6;
    pub const VT_BUFFERS: flatbuffers::VOffsetT = 8;
    pub const VT_COMPRESSION: flatbuffers::VOffsetT = 10;
    pub const VT_VARIADICBUFFERCOUNTS: flatbuffers::VOffsetT = 12;
    /// Wraps an already-located table. Caller must ensure `table` actually
    /// points at a RecordBatch table (normally via `follow`/verification).
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        RecordBatch { _tab: table }
    }
    /// Serializes a RecordBatch table from `args` into `_fbb`.
    /// Fields are pushed largest-first (generator convention for alignment).
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args RecordBatchArgs<'args>,
    ) -> flatbuffers::WIPOffset<RecordBatch<'bldr>> {
        let mut builder = RecordBatchBuilder::new(_fbb);
        builder.add_length(args.length);
        if let Some(x) = args.variadicBufferCounts {
            builder.add_variadicBufferCounts(x);
        }
        if let Some(x) = args.compression {
            builder.add_compression(x);
        }
        if let Some(x) = args.buffers {
            builder.add_buffers(x);
        }
        if let Some(x) = args.nodes {
            builder.add_nodes(x);
        }
        builder.finish()
    }
    /// Number of rows in the batch; defaults to 0 when the field is absent.
    #[inline]
    pub fn length(&self) -> i64 {
        // unwrap is safe: a Some default is supplied, so get never returns None.
        unsafe {
            self._tab
                .get::<i64>(RecordBatch::VT_LENGTH, Some(0))
                .unwrap()
        }
    }
    /// Per-field (length, null_count) nodes, if present.
    #[inline]
    pub fn nodes(&self) -> Option<flatbuffers::Vector<'a, FieldNode>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, FieldNode>>>(
                    RecordBatch::VT_NODES,
                    None,
                )
        }
    }
    /// Buffer locations within the message body, if present.
    #[inline]
    pub fn buffers(&self) -> Option<flatbuffers::Vector<'a, Buffer>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    RecordBatch::VT_BUFFERS,
                    None,
                )
        }
    }
    /// Optional body-buffer compression descriptor.
    #[inline]
    pub fn compression(&self) -> Option<BodyCompression<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<BodyCompression>>(
                    RecordBatch::VT_COMPRESSION,
                    None,
                )
        }
    }
    /// Buffer counts for variadic-buffer layouts, if present.
    #[inline]
    pub fn variadicBufferCounts(&self) -> Option<flatbuffers::Vector<'a, i64>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i64>>>(
                    RecordBatch::VT_VARIADICBUFFERCOUNTS,
                    None,
                )
        }
    }
}
// Structural verification: every field is optional (`false`), so presence is
// never required; vectors and the nested table are checked recursively.
impl flatbuffers::Verifiable for RecordBatch<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<i64>("length", Self::VT_LENGTH, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, FieldNode>>>(
                "nodes",
                Self::VT_NODES,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "buffers",
                Self::VT_BUFFERS,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<BodyCompression>>(
                "compression",
                Self::VT_COMPRESSION,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i64>>>(
                "variadicBufferCounts",
                Self::VT_VARIADICBUFFERCOUNTS,
                false,
            )?
            .finish();
        Ok(())
    }
}
/// User-supplied arguments for [`RecordBatch::create`]; `None` fields are
/// omitted from the serialized table.
pub struct RecordBatchArgs<'a> {
    pub length: i64,
    pub nodes: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, FieldNode>>>,
    pub buffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub compression: Option<flatbuffers::WIPOffset<BodyCompression<'a>>>,
    pub variadicBufferCounts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i64>>>,
}
impl<'a> Default for RecordBatchArgs<'a> {
    #[inline]
    fn default() -> Self {
        // Schema defaults: zero length, all optional vectors/tables absent.
        RecordBatchArgs {
            length: 0,
            nodes: None,
            buffers: None,
            compression: None,
            variadicBufferCounts: None,
        }
    }
}
/// Incremental builder for a `RecordBatch` table.
pub struct RecordBatchBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> RecordBatchBuilder<'a, 'b> {
    /// Writes the row count; values equal to the default 0 are elided.
    #[inline]
    pub fn add_length(&mut self, length: i64) {
        self.fbb_
            .push_slot::<i64>(RecordBatch::VT_LENGTH, length, 0);
    }
    /// Stores the field-node vector offset (always written when called).
    #[inline]
    pub fn add_nodes(&mut self, nodes: flatbuffers::WIPOffset<flatbuffers::Vector<'b, FieldNode>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(RecordBatch::VT_NODES, nodes);
    }
    /// Stores the buffer-descriptor vector offset.
    #[inline]
    pub fn add_buffers(
        &mut self,
        buffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(RecordBatch::VT_BUFFERS, buffers);
    }
    /// Stores the offset of a nested BodyCompression table.
    #[inline]
    pub fn add_compression(&mut self, compression: flatbuffers::WIPOffset<BodyCompression<'b>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<BodyCompression>>(
                RecordBatch::VT_COMPRESSION,
                compression,
            );
    }
    /// Stores the variadic-buffer-counts vector offset.
    #[inline]
    pub fn add_variadicBufferCounts(
        &mut self,
        variadicBufferCounts: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i64>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            RecordBatch::VT_VARIADICBUFFERCOUNTS,
            variadicBufferCounts,
        );
    }
    /// Starts a new table in `_fbb`; must be paired with `finish`.
    #[inline]
    pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> RecordBatchBuilder<'a, 'b> {
        let start = _fbb.start_table();
        RecordBatchBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and returns its offset for embedding in a parent.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<RecordBatch<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}
impl core::fmt::Debug for RecordBatch<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("RecordBatch");
        ds.field("length", &self.length());
        ds.field("nodes", &self.nodes());
        ds.field("buffers", &self.buffers());
        ds.field("compression", &self.compression());
        ds.field("variadicBufferCounts", &self.variadicBufferCounts());
        ds.finish()
    }
}
/// Marker type naming offsets to `DictionaryBatch` tables; never instantiated.
pub enum DictionaryBatchOffset {}
/// Read-only view over a `DictionaryBatch` FlatBuffers table: a dictionary id
/// plus the RecordBatch holding the dictionary's values.
#[derive(Copy, Clone, PartialEq)]
pub struct DictionaryBatch<'a> {
    pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for DictionaryBatch<'a> {
    type Inner = DictionaryBatch<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must point at a valid table in `buf`.
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
impl<'a> DictionaryBatch<'a> {
    // Vtable slot offsets generated from the schema field ids.
    pub const VT_ID: flatbuffers::VOffsetT = 4;
    pub const VT_DATA: flatbuffers::VOffsetT = 6;
    pub const VT_ISDELTA: flatbuffers::VOffsetT = 8;
    /// Wraps an already-located table. Caller must ensure `table` actually
    /// points at a DictionaryBatch table (normally via `follow`/verification).
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        DictionaryBatch { _tab: table }
    }
    /// Serializes a DictionaryBatch table from `args` into `_fbb`.
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args DictionaryBatchArgs<'args>,
    ) -> flatbuffers::WIPOffset<DictionaryBatch<'bldr>> {
        let mut builder = DictionaryBatchBuilder::new(_fbb);
        builder.add_id(args.id);
        if let Some(x) = args.data {
            builder.add_data(x);
        }
        builder.add_isDelta(args.isDelta);
        builder.finish()
    }
    /// Dictionary id this batch belongs to; defaults to 0 when absent.
    #[inline]
    pub fn id(&self) -> i64 {
        // unwrap is safe: a Some default is supplied, so get never returns None.
        unsafe {
            self._tab
                .get::<i64>(DictionaryBatch::VT_ID, Some(0))
                .unwrap()
        }
    }
    /// The RecordBatch containing the dictionary values, if present.
    #[inline]
    pub fn data(&self) -> Option<RecordBatch<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<RecordBatch>>(DictionaryBatch::VT_DATA, None)
        }
    }
    /// True when this batch appends to (rather than replaces) an earlier
    /// dictionary with the same id; defaults to false when absent.
    #[inline]
    pub fn isDelta(&self) -> bool {
        unsafe {
            self._tab
                .get::<bool>(DictionaryBatch::VT_ISDELTA, Some(false))
                .unwrap()
        }
    }
}
// Structural verification: all fields optional (`false`); the nested
// RecordBatch is verified recursively when present.
impl flatbuffers::Verifiable for DictionaryBatch<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<i64>("id", Self::VT_ID, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<RecordBatch>>("data", Self::VT_DATA, false)?
            .visit_field::<bool>("isDelta", Self::VT_ISDELTA, false)?
            .finish();
        Ok(())
    }
}
/// User-supplied arguments for [`DictionaryBatch::create`]; `None` data is
/// omitted from the serialized table.
pub struct DictionaryBatchArgs<'a> {
    pub id: i64,
    pub data: Option<flatbuffers::WIPOffset<RecordBatch<'a>>>,
    pub isDelta: bool,
}
// Fix: the impl header previously read `impl<'a> Default for
// DictionaryBatchArgs {` — eliding the required lifetime argument on the
// self type does not compile (E0726, "implicit elided lifetime not allowed
// here"). The lifetime is now applied explicitly, matching RecordBatchArgs
// and MessageArgs.
impl<'a> Default for DictionaryBatchArgs<'a> {
    #[inline]
    fn default() -> Self {
        // Schema defaults: id 0, no data table, non-delta batch.
        DictionaryBatchArgs {
            id: 0,
            data: None,
            isDelta: false,
        }
    }
}
/// Incremental builder for a `DictionaryBatch` table.
pub struct DictionaryBatchBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> DictionaryBatchBuilder<'a, 'b> {
    /// Writes the dictionary id; values equal to the default 0 are elided.
    #[inline]
    pub fn add_id(&mut self, id: i64) {
        self.fbb_.push_slot::<i64>(DictionaryBatch::VT_ID, id, 0);
    }
    /// Stores the offset of the nested RecordBatch holding dictionary values.
    #[inline]
    pub fn add_data(&mut self, data: flatbuffers::WIPOffset<RecordBatch<'b>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<RecordBatch>>(
                DictionaryBatch::VT_DATA,
                data,
            );
    }
    /// Writes the delta flag; values equal to the default `false` are elided.
    #[inline]
    pub fn add_isDelta(&mut self, isDelta: bool) {
        self.fbb_
            .push_slot::<bool>(DictionaryBatch::VT_ISDELTA, isDelta, false);
    }
    /// Starts a new table in `_fbb`; must be paired with `finish`.
    #[inline]
    pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> DictionaryBatchBuilder<'a, 'b> {
        let start = _fbb.start_table();
        DictionaryBatchBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and returns its offset for embedding in a parent.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<DictionaryBatch<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}
impl core::fmt::Debug for DictionaryBatch<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("DictionaryBatch");
        ds.field("id", &self.id());
        ds.field("data", &self.data());
        ds.field("isDelta", &self.isDelta());
        ds.finish()
    }
}
/// Marker type naming offsets to `Message` tables; never instantiated.
pub enum MessageOffset {}
/// Read-only view over the root `Message` FlatBuffers table: an IPC envelope
/// holding a version, a `header` union (Schema / batches / tensors), the body
/// length, and optional custom metadata.
#[derive(Copy, Clone, PartialEq)]
pub struct Message<'a> {
    pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for Message<'a> {
    type Inner = Message<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        // SAFETY (caller contract): `loc` must point at a valid table in `buf`.
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
impl<'a> Message<'a> {
    // Vtable slot offsets generated from the schema field ids. Note that the
    // `header` union occupies two slots: its type tag and its value offset.
    pub const VT_VERSION: flatbuffers::VOffsetT = 4;
    pub const VT_HEADER_TYPE: flatbuffers::VOffsetT = 6;
    pub const VT_HEADER: flatbuffers::VOffsetT = 8;
    pub const VT_BODYLENGTH: flatbuffers::VOffsetT = 10;
    pub const VT_CUSTOM_METADATA: flatbuffers::VOffsetT = 12;
    /// Wraps an already-located table. Caller must ensure `table` actually
    /// points at a Message table (normally via `follow`/verification).
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        Message { _tab: table }
    }
    /// Serializes a Message table from `args` into `_fbb`.
    /// Fields are pushed largest-first (generator convention for alignment);
    /// the union's value (`header`) and tag (`header_type`) must agree.
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args MessageArgs<'args>,
    ) -> flatbuffers::WIPOffset<Message<'bldr>> {
        let mut builder = MessageBuilder::new(_fbb);
        builder.add_bodyLength(args.bodyLength);
        if let Some(x) = args.custom_metadata {
            builder.add_custom_metadata(x);
        }
        if let Some(x) = args.header {
            builder.add_header(x);
        }
        builder.add_version(args.version);
        builder.add_header_type(args.header_type);
        builder.finish()
    }
    /// Metadata version; defaults to V1 when the field is absent.
    #[inline]
    pub fn version(&self) -> MetadataVersion {
        // unwrap is safe: a Some default is supplied, so get never returns None.
        unsafe {
            self._tab
                .get::<MetadataVersion>(Message::VT_VERSION, Some(MetadataVersion::V1))
                .unwrap()
        }
    }
    /// Discriminant of the `header` union; NONE when absent.
    #[inline]
    pub fn header_type(&self) -> MessageHeader {
        unsafe {
            self._tab
                .get::<MessageHeader>(Message::VT_HEADER_TYPE, Some(MessageHeader::NONE))
                .unwrap()
        }
    }
    /// Raw union value as an untyped table; use the typed `header_as_*`
    /// accessors below, which check the discriminant first.
    #[inline]
    pub fn header(&self) -> Option<flatbuffers::Table<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
                    Message::VT_HEADER,
                    None,
                )
        }
    }
    /// Length of the message body that follows this metadata; defaults to 0.
    #[inline]
    pub fn bodyLength(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(Message::VT_BODYLENGTH, Some(0))
                .unwrap()
        }
    }
    /// Application-defined key/value metadata, if present.
    #[inline]
    pub fn custom_metadata(
        &self,
    ) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue<'a>>>> {
        unsafe {
            self._tab.get::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue>>,
            >>(Message::VT_CUSTOM_METADATA, None)
        }
    }
    /// The header as a Schema, or `None` if the discriminant differs.
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_schema(&self) -> Option<Schema<'a>> {
        if self.header_type() == MessageHeader::Schema {
            self.header().map(|t| {
                // SAFETY: discriminant says the union value is a Schema table.
                unsafe { Schema::init_from_table(t) }
            })
        } else {
            None
        }
    }
    /// The header as a DictionaryBatch, or `None` if the discriminant differs.
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_dictionary_batch(&self) -> Option<DictionaryBatch<'a>> {
        if self.header_type() == MessageHeader::DictionaryBatch {
            self.header().map(|t| {
                // SAFETY: discriminant says the union value is a DictionaryBatch.
                unsafe { DictionaryBatch::init_from_table(t) }
            })
        } else {
            None
        }
    }
    /// The header as a RecordBatch, or `None` if the discriminant differs.
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_record_batch(&self) -> Option<RecordBatch<'a>> {
        if self.header_type() == MessageHeader::RecordBatch {
            self.header().map(|t| {
                // SAFETY: discriminant says the union value is a RecordBatch.
                unsafe { RecordBatch::init_from_table(t) }
            })
        } else {
            None
        }
    }
    /// The header as a Tensor, or `None` if the discriminant differs.
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_tensor(&self) -> Option<Tensor<'a>> {
        if self.header_type() == MessageHeader::Tensor {
            self.header().map(|t| {
                // SAFETY: discriminant says the union value is a Tensor table.
                unsafe { Tensor::init_from_table(t) }
            })
        } else {
            None
        }
    }
    /// The header as a SparseTensor, or `None` if the discriminant differs.
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_sparse_tensor(&self) -> Option<SparseTensor<'a>> {
        if self.header_type() == MessageHeader::SparseTensor {
            self.header().map(|t| {
                // SAFETY: discriminant says the union value is a SparseTensor.
                unsafe { SparseTensor::init_from_table(t) }
            })
        } else {
            None
        }
    }
}
// Structural verification. `visit_union` checks the (header_type, header)
// pair together: the closure dispatches on the discriminant and verifies the
// value as the corresponding table; unknown discriminants are accepted
// (open union, forward compatibility).
impl flatbuffers::Verifiable for Message<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<MetadataVersion>("version", Self::VT_VERSION, false)?
            .visit_union::<MessageHeader, _>(
                "header_type",
                Self::VT_HEADER_TYPE,
                "header",
                Self::VT_HEADER,
                false,
                |key, v, pos| match key {
                    MessageHeader::Schema => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Schema>>(
                            "MessageHeader::Schema",
                            pos,
                        ),
                    MessageHeader::DictionaryBatch => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<DictionaryBatch>>(
                            "MessageHeader::DictionaryBatch",
                            pos,
                        ),
                    MessageHeader::RecordBatch => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<RecordBatch>>(
                            "MessageHeader::RecordBatch",
                            pos,
                        ),
                    MessageHeader::Tensor => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Tensor>>(
                            "MessageHeader::Tensor",
                            pos,
                        ),
                    MessageHeader::SparseTensor => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensor>>(
                            "MessageHeader::SparseTensor",
                            pos,
                        ),
                    _ => Ok(()),
                },
            )?
            .visit_field::<i64>("bodyLength", Self::VT_BODYLENGTH, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<KeyValue>>,
            >>("custom_metadata", Self::VT_CUSTOM_METADATA, false)?
            .finish();
        Ok(())
    }
}
/// User-supplied arguments for [`Message::create`]; `header` and
/// `header_type` must be set consistently by the caller.
pub struct MessageArgs<'a> {
    pub version: MetadataVersion,
    pub header_type: MessageHeader,
    pub header: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    pub bodyLength: i64,
    pub custom_metadata: Option<
        flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue<'a>>>>,
    >,
}
impl<'a> Default for MessageArgs<'a> {
    #[inline]
    fn default() -> Self {
        // Schema defaults: V1, no union value (NONE), zero body, no metadata.
        MessageArgs {
            version: MetadataVersion::V1,
            header_type: MessageHeader::NONE,
            header: None,
            bodyLength: 0,
            custom_metadata: None,
        }
    }
}
/// Incremental builder for a `Message` table.
pub struct MessageBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> MessageBuilder<'a, 'b> {
    /// Writes the version slot; values equal to the default V1 are elided.
    #[inline]
    pub fn add_version(&mut self, version: MetadataVersion) {
        self.fbb_
            .push_slot::<MetadataVersion>(Message::VT_VERSION, version, MetadataVersion::V1);
    }
    /// Writes the union discriminant; NONE (the default) is elided.
    #[inline]
    pub fn add_header_type(&mut self, header_type: MessageHeader) {
        self.fbb_.push_slot::<MessageHeader>(
            Message::VT_HEADER_TYPE,
            header_type,
            MessageHeader::NONE,
        );
    }
    /// Stores the union value offset; must match the discriminant added via
    /// `add_header_type`.
    #[inline]
    pub fn add_header(&mut self, header: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(Message::VT_HEADER, header);
    }
    /// Writes the body length; values equal to the default 0 are elided.
    #[inline]
    pub fn add_bodyLength(&mut self, bodyLength: i64) {
        self.fbb_
            .push_slot::<i64>(Message::VT_BODYLENGTH, bodyLength, 0);
    }
    /// Stores the custom-metadata vector offset.
    #[inline]
    pub fn add_custom_metadata(
        &mut self,
        custom_metadata: flatbuffers::WIPOffset<
            flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<KeyValue<'b>>>,
        >,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            Message::VT_CUSTOM_METADATA,
            custom_metadata,
        );
    }
    /// Starts a new table in `_fbb`; must be paired with `finish`.
    #[inline]
    pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> MessageBuilder<'a, 'b> {
        let start = _fbb.start_table();
        MessageBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and returns its offset for embedding/finishing.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<Message<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}
// Debug renders the union by dispatching on the discriminant; a discriminant
// that does not match a readable value is reported inline rather than
// panicking, and unknown discriminants print the field as None.
impl core::fmt::Debug for Message<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("Message");
        ds.field("version", &self.version());
        ds.field("header_type", &self.header_type());
        match self.header_type() {
            MessageHeader::Schema => {
                if let Some(x) = self.header_as_schema() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::DictionaryBatch => {
                if let Some(x) = self.header_as_dictionary_batch() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::RecordBatch => {
                if let Some(x) = self.header_as_record_batch() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::Tensor => {
                if let Some(x) = self.header_as_tensor() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::SparseTensor => {
                if let Some(x) = self.header_as_sparse_tensor() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            _ => {
                // NONE or an unrecognized discriminant: no typed view exists.
                let x: Option<()> = None;
                ds.field("header", &x)
            }
        };
        ds.field("bodyLength", &self.bodyLength());
        ds.field("custom_metadata", &self.custom_metadata());
        ds.finish()
    }
}
/// Verifies `buf` (default verifier options) and returns the root `Message`.
#[inline]
pub fn root_as_message(buf: &[u8]) -> Result<Message, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root::<Message>(buf)
}
/// Like [`root_as_message`], for buffers carrying a 4-byte size prefix.
#[inline]
pub fn size_prefixed_root_as_message(
    buf: &[u8],
) -> Result<Message, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root::<Message>(buf)
}
/// Verifies `buf` with caller-supplied verifier options (e.g. depth/size
/// limits) and returns the root `Message`.
#[inline]
pub fn root_as_message_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<Message<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root_with_opts::<Message<'b>>(opts, buf)
}
/// Like [`root_as_message_with_opts`], for size-prefixed buffers.
#[inline]
pub fn size_prefixed_root_as_message_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<Message<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root_with_opts::<Message<'b>>(opts, buf)
}
/// Returns the root `Message` WITHOUT verification.
/// Caller must guarantee `buf` holds a valid, trusted Message flatbuffer.
#[inline]
pub unsafe fn root_as_message_unchecked(buf: &[u8]) -> Message {
    flatbuffers::root_unchecked::<Message>(buf)
}
/// Unverified variant for size-prefixed buffers; same caller contract.
#[inline]
pub unsafe fn size_prefixed_root_as_message_unchecked(buf: &[u8]) -> Message {
    flatbuffers::size_prefixed_root_unchecked::<Message>(buf)
}
/// Finalizes the buffer with `root` as the root table (no file identifier).
#[inline]
pub fn finish_message_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<Message<'a>>,
) {
    fbb.finish(root, None);
}
/// Finalizes the buffer with a 4-byte size prefix before the root table.
#[inline]
pub fn finish_size_prefixed_message_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<Message<'a>>,
) {
    fbb.finish_size_prefixed(root, None);
}