#![allow(dead_code)]
#![allow(unused_imports)]

use crate::gen::Schema::*;
use crate::gen::SparseTensor::*;
use crate::gen::Tensor::*;
use flatbuffers::EndianScalar;
use std::{cmp::Ordering, mem};
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_COMPRESSION_TYPE: i8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_COMPRESSION_TYPE: i8 = 1;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPRESSION_TYPE: [CompressionType; 2] =
    [CompressionType::LZ4_FRAME, CompressionType::ZSTD];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompressionType(pub i8);
#[allow(non_upper_case_globals)]
impl CompressionType {
    pub const LZ4_FRAME: Self = Self(0);
    pub const ZSTD: Self = Self(1);

    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 1;
    pub const ENUM_VALUES: &'static [Self] = &[Self::LZ4_FRAME, Self::ZSTD];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::LZ4_FRAME => Some("LZ4_FRAME"),
            Self::ZSTD => Some("ZSTD"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for CompressionType {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for CompressionType {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
        Self(b)
    }
}

impl flatbuffers::Push for CompressionType {
    type Output = CompressionType;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<i8>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for CompressionType {
    type Scalar = i8;
    #[inline]
    fn to_little_endian(self) -> i8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i8) -> Self {
        let b = i8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for CompressionType {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for CompressionType {}
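
// Illustrative sketch (not part of the generated bindings): `CompressionType` is an
// "open" enum wrapping an `i8`, so discriminants outside the known variants are carried
// through rather than rejected. The module name below is ours, not generated.
#[cfg(test)]
mod compression_type_example {
    use super::*;

    #[test]
    fn open_enum_behavior() {
        assert_eq!(CompressionType::ZSTD.variant_name(), Some("ZSTD"));
        // An out-of-range discriminant is representable but has no name and
        // falls back to the `<UNKNOWN ...>` Debug rendering.
        let unknown = CompressionType(42);
        assert_eq!(unknown.variant_name(), None);
        assert_eq!(format!("{unknown:?}"), "<UNKNOWN 42>");
    }
}
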
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_BODY_COMPRESSION_METHOD: i8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_BODY_COMPRESSION_METHOD: i8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_BODY_COMPRESSION_METHOD: [BodyCompressionMethod; 1] =
    [BodyCompressionMethod::BUFFER];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct BodyCompressionMethod(pub i8);
#[allow(non_upper_case_globals)]
impl BodyCompressionMethod {
    pub const BUFFER: Self = Self(0);

    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 0;
    pub const ENUM_VALUES: &'static [Self] = &[Self::BUFFER];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::BUFFER => Some("BUFFER"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for BodyCompressionMethod {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for BodyCompressionMethod {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = flatbuffers::read_scalar_at::<i8>(buf, loc);
        Self(b)
    }
}

impl flatbuffers::Push for BodyCompressionMethod {
    type Output = BodyCompressionMethod;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<i8>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for BodyCompressionMethod {
    type Scalar = i8;
    #[inline]
    fn to_little_endian(self) -> i8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i8) -> Self {
        let b = i8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for BodyCompressionMethod {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for BodyCompressionMethod {}
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_MESSAGE_HEADER: u8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_MESSAGE_HEADER: u8 = 5;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_MESSAGE_HEADER: [MessageHeader; 6] = [
    MessageHeader::NONE,
    MessageHeader::Schema,
    MessageHeader::DictionaryBatch,
    MessageHeader::RecordBatch,
    MessageHeader::Tensor,
    MessageHeader::SparseTensor,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct MessageHeader(pub u8);
#[allow(non_upper_case_globals)]
impl MessageHeader {
    pub const NONE: Self = Self(0);
    pub const Schema: Self = Self(1);
    pub const DictionaryBatch: Self = Self(2);
    pub const RecordBatch: Self = Self(3);
    pub const Tensor: Self = Self(4);
    pub const SparseTensor: Self = Self(5);

    pub const ENUM_MIN: u8 = 0;
    pub const ENUM_MAX: u8 = 5;
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::NONE,
        Self::Schema,
        Self::DictionaryBatch,
        Self::RecordBatch,
        Self::Tensor,
        Self::SparseTensor,
    ];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::NONE => Some("NONE"),
            Self::Schema => Some("Schema"),
            Self::DictionaryBatch => Some("DictionaryBatch"),
            Self::RecordBatch => Some("RecordBatch"),
            Self::Tensor => Some("Tensor"),
            Self::SparseTensor => Some("SparseTensor"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for MessageHeader {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for MessageHeader {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
        Self(b)
    }
}

impl flatbuffers::Push for MessageHeader {
    type Output = MessageHeader;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<u8>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for MessageHeader {
    type Scalar = u8;
    #[inline]
    fn to_little_endian(self) -> u8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: u8) -> Self {
        let b = u8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for MessageHeader {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        u8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for MessageHeader {}
pub struct MessageHeaderUnionTableOffset {}

/// Metadata for a field at some level of a (possibly nested) type tree:
/// the number of value slots (`length`) and how many of them are null
/// (`null_count`), stored inline as two little-endian `i64`s.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct FieldNode(pub [u8; 16]);
impl Default for FieldNode {
    fn default() -> Self {
        Self([0; 16])
    }
}
impl core::fmt::Debug for FieldNode {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        f.debug_struct("FieldNode")
            .field("length", &self.length())
            .field("null_count", &self.null_count())
            .finish()
    }
}

impl flatbuffers::SimpleToVerifyInSlice for FieldNode {}
impl<'a> flatbuffers::Follow<'a> for FieldNode {
    type Inner = &'a FieldNode;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        <&'a FieldNode>::follow(buf, loc)
    }
}
impl<'a> flatbuffers::Follow<'a> for &'a FieldNode {
    type Inner = &'a FieldNode;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        flatbuffers::follow_cast_ref::<FieldNode>(buf, loc)
    }
}
impl<'b> flatbuffers::Push for FieldNode {
    type Output = FieldNode;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        let src =
            ::core::slice::from_raw_parts(self as *const FieldNode as *const u8, Self::size());
        dst.copy_from_slice(src);
    }
    #[inline]
    fn alignment() -> flatbuffers::PushAlignment {
        flatbuffers::PushAlignment::new(8)
    }
}

impl<'a> flatbuffers::Verifiable for FieldNode {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.in_buffer::<Self>(pos)
    }
}

impl<'a> FieldNode {
    #[allow(clippy::too_many_arguments)]
    pub fn new(length: i64, null_count: i64) -> Self {
        let mut s = Self([0; 16]);
        s.set_length(length);
        s.set_null_count(null_count);
        s
    }

    pub fn length(&self) -> i64 {
        let mut mem = core::mem::MaybeUninit::<<i64 as EndianScalar>::Scalar>::uninit();
        EndianScalar::from_little_endian(unsafe {
            core::ptr::copy_nonoverlapping(
                self.0[0..].as_ptr(),
                mem.as_mut_ptr() as *mut u8,
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
            mem.assume_init()
        })
    }

    pub fn set_length(&mut self, x: i64) {
        let x_le = x.to_little_endian();
        unsafe {
            core::ptr::copy_nonoverlapping(
                &x_le as *const _ as *const u8,
                self.0[0..].as_mut_ptr(),
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
        }
    }

    pub fn null_count(&self) -> i64 {
        let mut mem = core::mem::MaybeUninit::<<i64 as EndianScalar>::Scalar>::uninit();
        EndianScalar::from_little_endian(unsafe {
            core::ptr::copy_nonoverlapping(
                self.0[8..].as_ptr(),
                mem.as_mut_ptr() as *mut u8,
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
            mem.assume_init()
        })
    }

    pub fn set_null_count(&mut self, x: i64) {
        let x_le = x.to_little_endian();
        unsafe {
            core::ptr::copy_nonoverlapping(
                &x_le as *const _ as *const u8,
                self.0[8..].as_mut_ptr(),
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
        }
    }
}
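
// Illustrative sketch (not part of the generated bindings): `FieldNode` is a fixed-size
// struct, so it can be constructed directly and pushed inline into a vector with
// `FlatBufferBuilder::create_vector`. Assumes the `flatbuffers` crate API used above.
#[cfg(test)]
mod field_node_example {
    use super::*;

    #[test]
    fn field_node_round_trip() {
        let node = FieldNode::new(100, 3);
        assert_eq!(node.length(), 100);
        assert_eq!(node.null_count(), 3);

        // Struct values are stored inline, so a vector of them needs no per-element offsets.
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        let _nodes = fbb.create_vector(&[FieldNode::new(100, 3), FieldNode::new(100, 0)]);
    }
}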

pub enum BodyCompressionOffset {}
/// Optional compression applied to the body buffers of an IPC message.
#[derive(Copy, Clone, PartialEq)]
pub struct BodyCompression<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for BodyCompression<'a> {
    type Inner = BodyCompression<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> BodyCompression<'a> {
    pub const VT_CODEC: flatbuffers::VOffsetT = 4;
    pub const VT_METHOD: flatbuffers::VOffsetT = 6;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        BodyCompression { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args BodyCompressionArgs,
    ) -> flatbuffers::WIPOffset<BodyCompression<'bldr>> {
        let mut builder = BodyCompressionBuilder::new(_fbb);
        builder.add_method(args.method);
        builder.add_codec(args.codec);
        builder.finish()
    }

    #[inline]
    pub fn codec(&self) -> CompressionType {
        unsafe {
            self._tab
                .get::<CompressionType>(BodyCompression::VT_CODEC, Some(CompressionType::LZ4_FRAME))
                .unwrap()
        }
    }
    #[inline]
    pub fn method(&self) -> BodyCompressionMethod {
        unsafe {
            self._tab
                .get::<BodyCompressionMethod>(
                    BodyCompression::VT_METHOD,
                    Some(BodyCompressionMethod::BUFFER),
                )
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for BodyCompression<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<CompressionType>("codec", Self::VT_CODEC, false)?
            .visit_field::<BodyCompressionMethod>("method", Self::VT_METHOD, false)?
            .finish();
        Ok(())
    }
}
pub struct BodyCompressionArgs {
    pub codec: CompressionType,
    pub method: BodyCompressionMethod,
}
impl<'a> Default for BodyCompressionArgs {
    #[inline]
    fn default() -> Self {
        BodyCompressionArgs {
            codec: CompressionType::LZ4_FRAME,
            method: BodyCompressionMethod::BUFFER,
        }
    }
}

pub struct BodyCompressionBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BodyCompressionBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_codec(&mut self, codec: CompressionType) {
        self.fbb_.push_slot::<CompressionType>(
            BodyCompression::VT_CODEC,
            codec,
            CompressionType::LZ4_FRAME,
        );
    }
    #[inline]
    pub fn add_method(&mut self, method: BodyCompressionMethod) {
        self.fbb_.push_slot::<BodyCompressionMethod>(
            BodyCompression::VT_METHOD,
            method,
            BodyCompressionMethod::BUFFER,
        );
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> BodyCompressionBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        BodyCompressionBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<BodyCompression<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for BodyCompression<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("BodyCompression");
        ds.field("codec", &self.codec());
        ds.field("method", &self.method());
        ds.finish()
    }
}
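
// Illustrative sketch (not part of the generated bindings): building a standalone
// `BodyCompression` table and reading it back through the verifier with
// `flatbuffers::root`. Assumes the `flatbuffers` crate API used above.
#[cfg(test)]
mod body_compression_example {
    use super::*;

    #[test]
    fn build_and_read_back() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        let compression = BodyCompression::create(
            &mut fbb,
            &BodyCompressionArgs {
                codec: CompressionType::ZSTD,
                method: BodyCompressionMethod::BUFFER,
            },
        );
        fbb.finish(compression, None);

        let read = flatbuffers::root::<BodyCompression>(fbb.finished_data()).unwrap();
        assert_eq!(read.codec(), CompressionType::ZSTD);
        assert_eq!(read.method(), BodyCompressionMethod::BUFFER);
    }
}
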
pub enum RecordBatchOffset {}
/// A data header for a record (row) batch: the row count plus one `FieldNode` per
/// field and one `Buffer` entry per constituent memory buffer in the message body.
#[derive(Copy, Clone, PartialEq)]
pub struct RecordBatch<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for RecordBatch<'a> {
    type Inner = RecordBatch<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> RecordBatch<'a> {
    pub const VT_LENGTH: flatbuffers::VOffsetT = 4;
    pub const VT_NODES: flatbuffers::VOffsetT = 6;
    pub const VT_BUFFERS: flatbuffers::VOffsetT = 8;
    pub const VT_COMPRESSION: flatbuffers::VOffsetT = 10;
    pub const VT_VARIADICBUFFERCOUNTS: flatbuffers::VOffsetT = 12;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        RecordBatch { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args RecordBatchArgs<'args>,
    ) -> flatbuffers::WIPOffset<RecordBatch<'bldr>> {
        let mut builder = RecordBatchBuilder::new(_fbb);
        builder.add_length(args.length);
        if let Some(x) = args.variadicBufferCounts {
            builder.add_variadicBufferCounts(x);
        }
        if let Some(x) = args.compression {
            builder.add_compression(x);
        }
        if let Some(x) = args.buffers {
            builder.add_buffers(x);
        }
        if let Some(x) = args.nodes {
            builder.add_nodes(x);
        }
        builder.finish()
    }

    #[inline]
    pub fn length(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(RecordBatch::VT_LENGTH, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn nodes(&self) -> Option<flatbuffers::Vector<'a, FieldNode>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, FieldNode>>>(
                    RecordBatch::VT_NODES,
                    None,
                )
        }
    }
    #[inline]
    pub fn buffers(&self) -> Option<flatbuffers::Vector<'a, Buffer>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    RecordBatch::VT_BUFFERS,
                    None,
                )
        }
    }
    #[inline]
    pub fn compression(&self) -> Option<BodyCompression<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<BodyCompression>>(
                    RecordBatch::VT_COMPRESSION,
                    None,
                )
        }
    }
    #[inline]
    pub fn variadicBufferCounts(&self) -> Option<flatbuffers::Vector<'a, i64>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i64>>>(
                    RecordBatch::VT_VARIADICBUFFERCOUNTS,
                    None,
                )
        }
    }
}

impl flatbuffers::Verifiable for RecordBatch<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<i64>("length", Self::VT_LENGTH, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, FieldNode>>>(
                "nodes",
                Self::VT_NODES,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "buffers",
                Self::VT_BUFFERS,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<BodyCompression>>(
                "compression",
                Self::VT_COMPRESSION,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i64>>>(
                "variadicBufferCounts",
                Self::VT_VARIADICBUFFERCOUNTS,
                false,
            )?
            .finish();
        Ok(())
    }
}
pub struct RecordBatchArgs<'a> {
    pub length: i64,
    pub nodes: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, FieldNode>>>,
    pub buffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub compression: Option<flatbuffers::WIPOffset<BodyCompression<'a>>>,
    pub variadicBufferCounts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i64>>>,
}
impl<'a> Default for RecordBatchArgs<'a> {
    #[inline]
    fn default() -> Self {
        RecordBatchArgs {
            length: 0,
            nodes: None,
            buffers: None,
            compression: None,
            variadicBufferCounts: None,
        }
    }
}

pub struct RecordBatchBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> RecordBatchBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_length(&mut self, length: i64) {
        self.fbb_
            .push_slot::<i64>(RecordBatch::VT_LENGTH, length, 0);
    }
    #[inline]
    pub fn add_nodes(&mut self, nodes: flatbuffers::WIPOffset<flatbuffers::Vector<'b, FieldNode>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(RecordBatch::VT_NODES, nodes);
    }
    #[inline]
    pub fn add_buffers(
        &mut self,
        buffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(RecordBatch::VT_BUFFERS, buffers);
    }
    #[inline]
    pub fn add_compression(&mut self, compression: flatbuffers::WIPOffset<BodyCompression<'b>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<BodyCompression>>(
                RecordBatch::VT_COMPRESSION,
                compression,
            );
    }
    #[inline]
    pub fn add_variadicBufferCounts(
        &mut self,
        variadicBufferCounts: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i64>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            RecordBatch::VT_VARIADICBUFFERCOUNTS,
            variadicBufferCounts,
        );
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> RecordBatchBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        RecordBatchBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<RecordBatch<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for RecordBatch<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("RecordBatch");
        ds.field("length", &self.length());
        ds.field("nodes", &self.nodes());
        ds.field("buffers", &self.buffers());
        ds.field("compression", &self.compression());
        ds.field("variadicBufferCounts", &self.variadicBufferCounts());
        ds.finish()
    }
}
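
// Illustrative sketch (not part of the generated bindings): assembling RecordBatch
// metadata for a single primitive column of 5 rows with no nulls. The buffer offsets
// and lengths are made up for illustration; `Buffer` comes from the generated Schema
// module imported at the top of this file.
#[cfg(test)]
mod record_batch_example {
    use super::*;

    #[test]
    fn build_record_batch_metadata() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        // One FieldNode per field; one Buffer entry per physical buffer
        // (here: validity bitmap + values).
        let nodes = fbb.create_vector(&[FieldNode::new(5, 0)]);
        let buffers = fbb.create_vector(&[Buffer::new(0, 0), Buffer::new(0, 20)]);
        let batch = RecordBatch::create(
            &mut fbb,
            &RecordBatchArgs {
                length: 5,
                nodes: Some(nodes),
                buffers: Some(buffers),
                compression: None,
                variadicBufferCounts: None,
            },
        );
        fbb.finish(batch, None);

        let read = flatbuffers::root::<RecordBatch>(fbb.finished_data()).unwrap();
        assert_eq!(read.length(), 5);
        assert_eq!(read.nodes().unwrap().len(), 1);
        assert_eq!(read.buffers().unwrap().len(), 2);
    }
}
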
pub enum DictionaryBatchOffset {}
/// For dictionary-encoded fields: the dictionary `id`, a `RecordBatch` holding the
/// dictionary's values, and an `isDelta` flag marking batches that extend an existing
/// dictionary rather than replacing it.
#[derive(Copy, Clone, PartialEq)]
pub struct DictionaryBatch<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for DictionaryBatch<'a> {
    type Inner = DictionaryBatch<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> DictionaryBatch<'a> {
    pub const VT_ID: flatbuffers::VOffsetT = 4;
    pub const VT_DATA: flatbuffers::VOffsetT = 6;
    pub const VT_ISDELTA: flatbuffers::VOffsetT = 8;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        DictionaryBatch { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args DictionaryBatchArgs<'args>,
    ) -> flatbuffers::WIPOffset<DictionaryBatch<'bldr>> {
        let mut builder = DictionaryBatchBuilder::new(_fbb);
        builder.add_id(args.id);
        if let Some(x) = args.data {
            builder.add_data(x);
        }
        builder.add_isDelta(args.isDelta);
        builder.finish()
    }

    #[inline]
    pub fn id(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(DictionaryBatch::VT_ID, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn data(&self) -> Option<RecordBatch<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<RecordBatch>>(DictionaryBatch::VT_DATA, None)
        }
    }
    #[inline]
    pub fn isDelta(&self) -> bool {
        unsafe {
            self._tab
                .get::<bool>(DictionaryBatch::VT_ISDELTA, Some(false))
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for DictionaryBatch<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<i64>("id", Self::VT_ID, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<RecordBatch>>("data", Self::VT_DATA, false)?
            .visit_field::<bool>("isDelta", Self::VT_ISDELTA, false)?
            .finish();
        Ok(())
    }
}
pub struct DictionaryBatchArgs<'a> {
    pub id: i64,
    pub data: Option<flatbuffers::WIPOffset<RecordBatch<'a>>>,
    pub isDelta: bool,
}
impl<'a> Default for DictionaryBatchArgs<'a> {
    #[inline]
    fn default() -> Self {
        DictionaryBatchArgs {
            id: 0,
            data: None,
            isDelta: false,
        }
    }
}

pub struct DictionaryBatchBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> DictionaryBatchBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_id(&mut self, id: i64) {
        self.fbb_.push_slot::<i64>(DictionaryBatch::VT_ID, id, 0);
    }
    #[inline]
    pub fn add_data(&mut self, data: flatbuffers::WIPOffset<RecordBatch<'b>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<RecordBatch>>(
                DictionaryBatch::VT_DATA,
                data,
            );
    }
    #[inline]
    pub fn add_isDelta(&mut self, isDelta: bool) {
        self.fbb_
            .push_slot::<bool>(DictionaryBatch::VT_ISDELTA, isDelta, false);
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> DictionaryBatchBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        DictionaryBatchBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<DictionaryBatch<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for DictionaryBatch<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("DictionaryBatch");
        ds.field("id", &self.id());
        ds.field("data", &self.data());
        ds.field("isDelta", &self.isDelta());
        ds.finish()
    }
}
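
// Illustrative sketch (not part of the generated bindings): a delta dictionary batch
// wrapping previously-built RecordBatch metadata. The id and field values are made up.
#[cfg(test)]
mod dictionary_batch_example {
    use super::*;

    #[test]
    fn build_delta_dictionary_batch() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        let data = RecordBatch::create(&mut fbb, &RecordBatchArgs::default());
        let dict = DictionaryBatch::create(
            &mut fbb,
            &DictionaryBatchArgs {
                id: 1,
                data: Some(data),
                isDelta: true, // appends to dictionary 1 instead of replacing it
            },
        );
        fbb.finish(dict, None);

        let read = flatbuffers::root::<DictionaryBatch>(fbb.finished_data()).unwrap();
        assert_eq!(read.id(), 1);
        assert!(read.isDelta());
    }
}
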
pub enum MessageOffset {}
/// The top-level Arrow IPC message: metadata version, a `MessageHeader` union
/// (Schema, DictionaryBatch, RecordBatch, Tensor, or SparseTensor), the length in
/// bytes of the message body, and optional application-defined key/value metadata.
#[derive(Copy, Clone, PartialEq)]
pub struct Message<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for Message<'a> {
    type Inner = Message<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> Message<'a> {
    pub const VT_VERSION: flatbuffers::VOffsetT = 4;
    pub const VT_HEADER_TYPE: flatbuffers::VOffsetT = 6;
    pub const VT_HEADER: flatbuffers::VOffsetT = 8;
    pub const VT_BODYLENGTH: flatbuffers::VOffsetT = 10;
    pub const VT_CUSTOM_METADATA: flatbuffers::VOffsetT = 12;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        Message { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args MessageArgs<'args>,
    ) -> flatbuffers::WIPOffset<Message<'bldr>> {
        let mut builder = MessageBuilder::new(_fbb);
        builder.add_bodyLength(args.bodyLength);
        if let Some(x) = args.custom_metadata {
            builder.add_custom_metadata(x);
        }
        if let Some(x) = args.header {
            builder.add_header(x);
        }
        builder.add_version(args.version);
        builder.add_header_type(args.header_type);
        builder.finish()
    }

    #[inline]
    pub fn version(&self) -> MetadataVersion {
        unsafe {
            self._tab
                .get::<MetadataVersion>(Message::VT_VERSION, Some(MetadataVersion::V1))
                .unwrap()
        }
    }
    #[inline]
    pub fn header_type(&self) -> MessageHeader {
        unsafe {
            self._tab
                .get::<MessageHeader>(Message::VT_HEADER_TYPE, Some(MessageHeader::NONE))
                .unwrap()
        }
    }
    #[inline]
    pub fn header(&self) -> Option<flatbuffers::Table<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
                    Message::VT_HEADER,
                    None,
                )
        }
    }
    #[inline]
    pub fn bodyLength(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(Message::VT_BODYLENGTH, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn custom_metadata(
        &self,
    ) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue<'a>>>> {
        unsafe {
            self._tab.get::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue>>,
            >>(Message::VT_CUSTOM_METADATA, None)
        }
    }
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_schema(&self) -> Option<Schema<'a>> {
        if self.header_type() == MessageHeader::Schema {
            self.header()
                .map(|t| unsafe { Schema::init_from_table(t) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_dictionary_batch(&self) -> Option<DictionaryBatch<'a>> {
        if self.header_type() == MessageHeader::DictionaryBatch {
            self.header()
                .map(|t| unsafe { DictionaryBatch::init_from_table(t) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_record_batch(&self) -> Option<RecordBatch<'a>> {
        if self.header_type() == MessageHeader::RecordBatch {
            self.header()
                .map(|t| unsafe { RecordBatch::init_from_table(t) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_tensor(&self) -> Option<Tensor<'a>> {
        if self.header_type() == MessageHeader::Tensor {
            self.header()
                .map(|t| unsafe { Tensor::init_from_table(t) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_sparse_tensor(&self) -> Option<SparseTensor<'a>> {
        if self.header_type() == MessageHeader::SparseTensor {
            self.header()
                .map(|t| unsafe { SparseTensor::init_from_table(t) })
        } else {
            None
        }
    }
}

impl flatbuffers::Verifiable for Message<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<MetadataVersion>("version", Self::VT_VERSION, false)?
            .visit_union::<MessageHeader, _>(
                "header_type",
                Self::VT_HEADER_TYPE,
                "header",
                Self::VT_HEADER,
                false,
                |key, v, pos| match key {
                    MessageHeader::Schema => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Schema>>(
                            "MessageHeader::Schema",
                            pos,
                        ),
                    MessageHeader::DictionaryBatch => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<DictionaryBatch>>(
                            "MessageHeader::DictionaryBatch",
                            pos,
                        ),
                    MessageHeader::RecordBatch => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<RecordBatch>>(
                            "MessageHeader::RecordBatch",
                            pos,
                        ),
                    MessageHeader::Tensor => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Tensor>>(
                            "MessageHeader::Tensor",
                            pos,
                        ),
                    MessageHeader::SparseTensor => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensor>>(
                            "MessageHeader::SparseTensor",
                            pos,
                        ),
                    _ => Ok(()),
                },
            )?
            .visit_field::<i64>("bodyLength", Self::VT_BODYLENGTH, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<KeyValue>>,
            >>("custom_metadata", Self::VT_CUSTOM_METADATA, false)?
            .finish();
        Ok(())
    }
}
pub struct MessageArgs<'a> {
    pub version: MetadataVersion,
    pub header_type: MessageHeader,
    pub header: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    pub bodyLength: i64,
    pub custom_metadata: Option<
        flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue<'a>>>>,
    >,
}
impl<'a> Default for MessageArgs<'a> {
    #[inline]
    fn default() -> Self {
        MessageArgs {
            version: MetadataVersion::V1,
            header_type: MessageHeader::NONE,
            header: None,
            bodyLength: 0,
            custom_metadata: None,
        }
    }
}

pub struct MessageBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> MessageBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_version(&mut self, version: MetadataVersion) {
        self.fbb_
            .push_slot::<MetadataVersion>(Message::VT_VERSION, version, MetadataVersion::V1);
    }
    #[inline]
    pub fn add_header_type(&mut self, header_type: MessageHeader) {
        self.fbb_.push_slot::<MessageHeader>(
            Message::VT_HEADER_TYPE,
            header_type,
            MessageHeader::NONE,
        );
    }
    #[inline]
    pub fn add_header(&mut self, header: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(Message::VT_HEADER, header);
    }
    #[inline]
    pub fn add_bodyLength(&mut self, bodyLength: i64) {
        self.fbb_
            .push_slot::<i64>(Message::VT_BODYLENGTH, bodyLength, 0);
    }
    #[inline]
    pub fn add_custom_metadata(
        &mut self,
        custom_metadata: flatbuffers::WIPOffset<
            flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<KeyValue<'b>>>,
        >,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            Message::VT_CUSTOM_METADATA,
            custom_metadata,
        );
    }
    #[inline]
    pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> MessageBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        MessageBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<Message<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for Message<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("Message");
        ds.field("version", &self.version());
        ds.field("header_type", &self.header_type());
        match self.header_type() {
            MessageHeader::Schema => {
                if let Some(x) = self.header_as_schema() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::DictionaryBatch => {
                if let Some(x) = self.header_as_dictionary_batch() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::RecordBatch => {
                if let Some(x) = self.header_as_record_batch() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::Tensor => {
                if let Some(x) = self.header_as_tensor() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::SparseTensor => {
                if let Some(x) = self.header_as_sparse_tensor() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            _ => {
                let x: Option<()> = None;
                ds.field("header", &x)
            }
        };
        ds.field("bodyLength", &self.bodyLength());
        ds.field("custom_metadata", &self.custom_metadata());
        ds.finish()
    }
}
#[inline]
/// Verifies, with default verifier options, that `buf` contains a root `Message` and returns it.
pub fn root_as_message(buf: &[u8]) -> Result<Message, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root::<Message>(buf)
}
#[inline]
/// Like `root_as_message`, but for a buffer that begins with a size prefix.
pub fn size_prefixed_root_as_message(
    buf: &[u8],
) -> Result<Message, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root::<Message>(buf)
}
#[inline]
/// Verifies `buf` with caller-supplied verifier options and returns the root `Message`.
pub fn root_as_message_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<Message<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root_with_opts::<Message<'b>>(opts, buf)
}
#[inline]
/// Like `root_as_message_with_opts`, but for a size-prefixed buffer.
pub fn size_prefixed_root_as_message_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<Message<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root_with_opts::<Message<'b>>(opts, buf)
}
#[inline]
/// Assumes, without verification, that `buf` contains a valid root `Message`.
pub unsafe fn root_as_message_unchecked(buf: &[u8]) -> Message {
    flatbuffers::root_unchecked::<Message>(buf)
}
#[inline]
/// Assumes, without verification, that `buf` contains a valid size-prefixed root `Message`.
pub unsafe fn size_prefixed_root_as_message_unchecked(buf: &[u8]) -> Message {
    flatbuffers::size_prefixed_root_unchecked::<Message>(buf)
}
#[inline]
pub fn finish_message_buffer<'a, 'b, A: flatbuffers::Allocator + 'a>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    root: flatbuffers::WIPOffset<Message<'a>>,
) {
    fbb.finish(root, None);
}

#[inline]
pub fn finish_size_prefixed_message_buffer<'a, 'b, A: flatbuffers::Allocator + 'a>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    root: flatbuffers::WIPOffset<Message<'a>>,
) {
    fbb.finish_size_prefixed(root, None);
}
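
// Illustrative sketch (not part of the generated bindings): a full round trip that packs
// RecordBatch metadata into a Message via the union header, finishes the buffer, and
// reads it back with `root_as_message`. `MetadataVersion` comes from the generated
// Schema module; the body length used here is made up for illustration.
#[cfg(test)]
mod message_round_trip_example {
    use super::*;

    #[test]
    fn build_and_read_message() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        let batch = RecordBatch::create(
            &mut fbb,
            &RecordBatchArgs {
                length: 5,
                ..Default::default()
            },
        );
        let message = Message::create(
            &mut fbb,
            &MessageArgs {
                version: MetadataVersion::V5,
                header_type: MessageHeader::RecordBatch,
                // A union value is stored as a plain table offset plus the type tag above.
                header: Some(batch.as_union_value()),
                bodyLength: 64,
                custom_metadata: None,
            },
        );
        finish_message_buffer(&mut fbb, message);

        let read = root_as_message(fbb.finished_data()).unwrap();
        assert_eq!(read.header_type(), MessageHeader::RecordBatch);
        assert_eq!(read.header_as_record_batch().unwrap().length(), 5);
        assert_eq!(read.bodyLength(), 64);
    }
}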