#![allow(dead_code)]
#![allow(unused_imports)]

use crate::gen::Schema::*;
use crate::gen::Tensor::*;
use flatbuffers::EndianScalar;
use std::{cmp::Ordering, mem};
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_SPARSE_MATRIX_COMPRESSED_AXIS: i16 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_SPARSE_MATRIX_COMPRESSED_AXIS: i16 = 1;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_SPARSE_MATRIX_COMPRESSED_AXIS: [SparseMatrixCompressedAxis; 2] = [
    SparseMatrixCompressedAxis::Row,
    SparseMatrixCompressedAxis::Column,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct SparseMatrixCompressedAxis(pub i16);
#[allow(non_upper_case_globals)]
impl SparseMatrixCompressedAxis {
    pub const Row: Self = Self(0);
    pub const Column: Self = Self(1);

    pub const ENUM_MIN: i16 = 0;
    pub const ENUM_MAX: i16 = 1;
    pub const ENUM_VALUES: &'static [Self] = &[Self::Row, Self::Column];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::Row => Some("Row"),
            Self::Column => Some("Column"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for SparseMatrixCompressedAxis {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for SparseMatrixCompressedAxis {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = flatbuffers::read_scalar_at::<i16>(buf, loc);
        Self(b)
    }
}

impl flatbuffers::Push for SparseMatrixCompressedAxis {
    type Output = SparseMatrixCompressedAxis;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<i16>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for SparseMatrixCompressedAxis {
    type Scalar = i16;
    #[inline]
    fn to_little_endian(self) -> i16 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i16) -> Self {
        let b = i16::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for SparseMatrixCompressedAxis {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i16::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for SparseMatrixCompressedAxis {}
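
// Illustrative sketch, not part of the generated schema code: the enum wrappers
// in this file are "open" enums around a raw integer tag, so a value written by
// a newer schema revision survives decoding instead of failing. The example
// below only uses items defined above in this file.
#[cfg(test)]
mod sparse_matrix_compressed_axis_example {
    use super::*;

    #[test]
    fn open_enum_keeps_unknown_tags() {
        // Known tags resolve to named variants.
        assert_eq!(SparseMatrixCompressedAxis::Row.variant_name(), Some("Row"));
        assert_eq!(SparseMatrixCompressedAxis::ENUM_VALUES.len(), 2);

        // A tag outside the declared range is carried through verbatim and is
        // reported as unknown by the Debug impl above.
        let future_tag = SparseMatrixCompressedAxis(42);
        assert_eq!(future_tag.variant_name(), None);
        assert_eq!(format!("{:?}", future_tag), "<UNKNOWN 42>");
    }
}
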
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_SPARSE_TENSOR_INDEX: u8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_SPARSE_TENSOR_INDEX: u8 = 3;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_SPARSE_TENSOR_INDEX: [SparseTensorIndex; 4] = [
    SparseTensorIndex::NONE,
    SparseTensorIndex::SparseTensorIndexCOO,
    SparseTensorIndex::SparseMatrixIndexCSX,
    SparseTensorIndex::SparseTensorIndexCSF,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct SparseTensorIndex(pub u8);
#[allow(non_upper_case_globals)]
impl SparseTensorIndex {
    pub const NONE: Self = Self(0);
    pub const SparseTensorIndexCOO: Self = Self(1);
    pub const SparseMatrixIndexCSX: Self = Self(2);
    pub const SparseTensorIndexCSF: Self = Self(3);

    pub const ENUM_MIN: u8 = 0;
    pub const ENUM_MAX: u8 = 3;
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::NONE,
        Self::SparseTensorIndexCOO,
        Self::SparseMatrixIndexCSX,
        Self::SparseTensorIndexCSF,
    ];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::NONE => Some("NONE"),
            Self::SparseTensorIndexCOO => Some("SparseTensorIndexCOO"),
            Self::SparseMatrixIndexCSX => Some("SparseMatrixIndexCSX"),
            Self::SparseTensorIndexCSF => Some("SparseTensorIndexCSF"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for SparseTensorIndex {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for SparseTensorIndex {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
        Self(b)
    }
}

impl flatbuffers::Push for SparseTensorIndex {
    type Output = SparseTensorIndex;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<u8>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for SparseTensorIndex {
    type Scalar = u8;
    #[inline]
    fn to_little_endian(self) -> u8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: u8) -> Self {
        let b = u8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for SparseTensorIndex {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        u8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for SparseTensorIndex {}
pub struct SparseTensorIndexUnionTableOffset {}

pub enum SparseTensorIndexCOOOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct SparseTensorIndexCOO<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseTensorIndexCOO<'a> {
    type Inner = SparseTensorIndexCOO<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> SparseTensorIndexCOO<'a> {
    pub const VT_INDICESTYPE: flatbuffers::VOffsetT = 4;
    pub const VT_INDICESSTRIDES: flatbuffers::VOffsetT = 6;
    pub const VT_INDICESBUFFER: flatbuffers::VOffsetT = 8;
    pub const VT_ISCANONICAL: flatbuffers::VOffsetT = 10;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseTensorIndexCOO { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args SparseTensorIndexCOOArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseTensorIndexCOO<'bldr>> {
        let mut builder = SparseTensorIndexCOOBuilder::new(_fbb);
        if let Some(x) = args.indicesBuffer {
            builder.add_indicesBuffer(x);
        }
        if let Some(x) = args.indicesStrides {
            builder.add_indicesStrides(x);
        }
        if let Some(x) = args.indicesType {
            builder.add_indicesType(x);
        }
        builder.add_isCanonical(args.isCanonical);
        builder.finish()
    }

    #[inline]
    pub fn indicesType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(
                    SparseTensorIndexCOO::VT_INDICESTYPE,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn indicesStrides(&self) -> Option<flatbuffers::Vector<'a, i64>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i64>>>(
                    SparseTensorIndexCOO::VT_INDICESSTRIDES,
                    None,
                )
        }
    }
    #[inline]
    pub fn indicesBuffer(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseTensorIndexCOO::VT_INDICESBUFFER, None)
                .unwrap()
        }
    }
    #[inline]
    pub fn isCanonical(&self) -> bool {
        unsafe {
            self._tab
                .get::<bool>(SparseTensorIndexCOO::VT_ISCANONICAL, Some(false))
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for SparseTensorIndexCOO<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indicesType",
                Self::VT_INDICESTYPE,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i64>>>(
                "indicesStrides",
                Self::VT_INDICESSTRIDES,
                false,
            )?
            .visit_field::<Buffer>("indicesBuffer", Self::VT_INDICESBUFFER, true)?
            .visit_field::<bool>("isCanonical", Self::VT_ISCANONICAL, false)?
            .finish();
        Ok(())
    }
}
pub struct SparseTensorIndexCOOArgs<'a> {
    pub indicesType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indicesStrides: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i64>>>,
    pub indicesBuffer: Option<&'a Buffer>,
    pub isCanonical: bool,
}
impl<'a> Default for SparseTensorIndexCOOArgs<'a> {
    #[inline]
    fn default() -> Self {
        SparseTensorIndexCOOArgs {
            indicesType: None,
            indicesStrides: None,
            indicesBuffer: None,
            isCanonical: false,
        }
    }
}

pub struct SparseTensorIndexCOOBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SparseTensorIndexCOOBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_indicesType(&mut self, indicesType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseTensorIndexCOO::VT_INDICESTYPE,
            indicesType,
        );
    }
    #[inline]
    pub fn add_indicesStrides(
        &mut self,
        indicesStrides: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i64>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCOO::VT_INDICESSTRIDES,
            indicesStrides,
        );
    }
    #[inline]
    pub fn add_indicesBuffer(&mut self, indicesBuffer: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseTensorIndexCOO::VT_INDICESBUFFER, indicesBuffer);
    }
    #[inline]
    pub fn add_isCanonical(&mut self, isCanonical: bool) {
        self.fbb_
            .push_slot::<bool>(SparseTensorIndexCOO::VT_ISCANONICAL, isCanonical, false);
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> SparseTensorIndexCOOBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        SparseTensorIndexCOOBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensorIndexCOO<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_
            .required(o, SparseTensorIndexCOO::VT_INDICESTYPE, "indicesType");
        self.fbb_
            .required(o, SparseTensorIndexCOO::VT_INDICESBUFFER, "indicesBuffer");
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for SparseTensorIndexCOO<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("SparseTensorIndexCOO");
        ds.field("indicesType", &self.indicesType());
        ds.field("indicesStrides", &self.indicesStrides());
        ds.field("indicesBuffer", &self.indicesBuffer());
        ds.field("isCanonical", &self.isCanonical());
        ds.finish()
    }
}
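
// Illustrative sketch, not generated code: building a `SparseTensorIndexCOO`
// table with the builder API above and reading it back. It assumes the sibling
// generated `Schema` module (imported via `use crate::gen::Schema::*`) exposes
// `Int`, `IntArgs`, and `Buffer` with the usual flatc-generated shapes
// (`Int::create`, `Buffer::new(offset, length)`); adjust the names if your
// generated code differs.
#[cfg(test)]
mod sparse_tensor_index_coo_example {
    use super::*;

    #[test]
    fn build_and_read_coo_index() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();

        // Child objects must be created before the table that references them.
        let indices_type = Int::create(
            &mut fbb,
            &IntArgs {
                bitWidth: 64,
                is_signed: true,
            },
        );
        let strides = fbb.create_vector(&[8i64, 1]);

        // `indicesType` and `indicesBuffer` are required; `finish()` panics if
        // either is missing.
        let coo = SparseTensorIndexCOO::create(
            &mut fbb,
            &SparseTensorIndexCOOArgs {
                indicesType: Some(indices_type),
                indicesStrides: Some(strides),
                indicesBuffer: Some(&Buffer::new(0, 128)),
                isCanonical: true,
            },
        );
        fbb.finish(coo, None);

        // Verify and follow the finished bytes back into the accessor struct.
        let coo = flatbuffers::root::<SparseTensorIndexCOO>(fbb.finished_data()).unwrap();
        assert!(coo.isCanonical());
        assert_eq!(coo.indicesStrides().unwrap().len(), 2);
    }
}
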
pub enum SparseMatrixIndexCSXOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct SparseMatrixIndexCSX<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseMatrixIndexCSX<'a> {
    type Inner = SparseMatrixIndexCSX<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> SparseMatrixIndexCSX<'a> {
    pub const VT_COMPRESSEDAXIS: flatbuffers::VOffsetT = 4;
    pub const VT_INDPTRTYPE: flatbuffers::VOffsetT = 6;
    pub const VT_INDPTRBUFFER: flatbuffers::VOffsetT = 8;
    pub const VT_INDICESTYPE: flatbuffers::VOffsetT = 10;
    pub const VT_INDICESBUFFER: flatbuffers::VOffsetT = 12;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseMatrixIndexCSX { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args SparseMatrixIndexCSXArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseMatrixIndexCSX<'bldr>> {
        let mut builder = SparseMatrixIndexCSXBuilder::new(_fbb);
        if let Some(x) = args.indicesBuffer {
            builder.add_indicesBuffer(x);
        }
        if let Some(x) = args.indicesType {
            builder.add_indicesType(x);
        }
        if let Some(x) = args.indptrBuffer {
            builder.add_indptrBuffer(x);
        }
        if let Some(x) = args.indptrType {
            builder.add_indptrType(x);
        }
        builder.add_compressedAxis(args.compressedAxis);
        builder.finish()
    }

    #[inline]
    pub fn compressedAxis(&self) -> SparseMatrixCompressedAxis {
        unsafe {
            self._tab
                .get::<SparseMatrixCompressedAxis>(
                    SparseMatrixIndexCSX::VT_COMPRESSEDAXIS,
                    Some(SparseMatrixCompressedAxis::Row),
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn indptrType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(SparseMatrixIndexCSX::VT_INDPTRTYPE, None)
                .unwrap()
        }
    }
    #[inline]
    pub fn indptrBuffer(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseMatrixIndexCSX::VT_INDPTRBUFFER, None)
                .unwrap()
        }
    }
    #[inline]
    pub fn indicesType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(
                    SparseMatrixIndexCSX::VT_INDICESTYPE,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn indicesBuffer(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseMatrixIndexCSX::VT_INDICESBUFFER, None)
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for SparseMatrixIndexCSX<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<SparseMatrixCompressedAxis>(
                "compressedAxis",
                Self::VT_COMPRESSEDAXIS,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indptrType",
                Self::VT_INDPTRTYPE,
                true,
            )?
            .visit_field::<Buffer>("indptrBuffer", Self::VT_INDPTRBUFFER, true)?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indicesType",
                Self::VT_INDICESTYPE,
                true,
            )?
            .visit_field::<Buffer>("indicesBuffer", Self::VT_INDICESBUFFER, true)?
            .finish();
        Ok(())
    }
}
pub struct SparseMatrixIndexCSXArgs<'a> {
    pub compressedAxis: SparseMatrixCompressedAxis,
    pub indptrType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indptrBuffer: Option<&'a Buffer>,
    pub indicesType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indicesBuffer: Option<&'a Buffer>,
}
impl<'a> Default for SparseMatrixIndexCSXArgs<'a> {
    #[inline]
    fn default() -> Self {
        SparseMatrixIndexCSXArgs {
            compressedAxis: SparseMatrixCompressedAxis::Row,
            indptrType: None,
            indptrBuffer: None,
            indicesType: None,
            indicesBuffer: None,
        }
    }
}

pub struct SparseMatrixIndexCSXBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SparseMatrixIndexCSXBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_compressedAxis(&mut self, compressedAxis: SparseMatrixCompressedAxis) {
        self.fbb_.push_slot::<SparseMatrixCompressedAxis>(
            SparseMatrixIndexCSX::VT_COMPRESSEDAXIS,
            compressedAxis,
            SparseMatrixCompressedAxis::Row,
        );
    }
    #[inline]
    pub fn add_indptrType(&mut self, indptrType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseMatrixIndexCSX::VT_INDPTRTYPE,
            indptrType,
        );
    }
    #[inline]
    pub fn add_indptrBuffer(&mut self, indptrBuffer: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseMatrixIndexCSX::VT_INDPTRBUFFER, indptrBuffer);
    }
    #[inline]
    pub fn add_indicesType(&mut self, indicesType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseMatrixIndexCSX::VT_INDICESTYPE,
            indicesType,
        );
    }
    #[inline]
    pub fn add_indicesBuffer(&mut self, indicesBuffer: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseMatrixIndexCSX::VT_INDICESBUFFER, indicesBuffer);
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> SparseMatrixIndexCSXBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        SparseMatrixIndexCSXBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseMatrixIndexCSX<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDPTRTYPE, "indptrType");
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDPTRBUFFER, "indptrBuffer");
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDICESTYPE, "indicesType");
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDICESBUFFER, "indicesBuffer");
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for SparseMatrixIndexCSX<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("SparseMatrixIndexCSX");
        ds.field("compressedAxis", &self.compressedAxis());
        ds.field("indptrType", &self.indptrType());
        ds.field("indptrBuffer", &self.indptrBuffer());
        ds.field("indicesType", &self.indicesType());
        ds.field("indicesBuffer", &self.indicesBuffer());
        ds.finish()
    }
}
pub enum SparseTensorIndexCSFOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct SparseTensorIndexCSF<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseTensorIndexCSF<'a> {
    type Inner = SparseTensorIndexCSF<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> SparseTensorIndexCSF<'a> {
    pub const VT_INDPTRTYPE: flatbuffers::VOffsetT = 4;
    pub const VT_INDPTRBUFFERS: flatbuffers::VOffsetT = 6;
    pub const VT_INDICESTYPE: flatbuffers::VOffsetT = 8;
    pub const VT_INDICESBUFFERS: flatbuffers::VOffsetT = 10;
    pub const VT_AXISORDER: flatbuffers::VOffsetT = 12;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseTensorIndexCSF { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args SparseTensorIndexCSFArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseTensorIndexCSF<'bldr>> {
        let mut builder = SparseTensorIndexCSFBuilder::new(_fbb);
        if let Some(x) = args.axisOrder {
            builder.add_axisOrder(x);
        }
        if let Some(x) = args.indicesBuffers {
            builder.add_indicesBuffers(x);
        }
        if let Some(x) = args.indicesType {
            builder.add_indicesType(x);
        }
        if let Some(x) = args.indptrBuffers {
            builder.add_indptrBuffers(x);
        }
        if let Some(x) = args.indptrType {
            builder.add_indptrType(x);
        }
        builder.finish()
    }

    #[inline]
    pub fn indptrType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(SparseTensorIndexCSF::VT_INDPTRTYPE, None)
                .unwrap()
        }
    }
    #[inline]
    pub fn indptrBuffers(&self) -> flatbuffers::Vector<'a, Buffer> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    SparseTensorIndexCSF::VT_INDPTRBUFFERS,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn indicesType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(
                    SparseTensorIndexCSF::VT_INDICESTYPE,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn indicesBuffers(&self) -> flatbuffers::Vector<'a, Buffer> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    SparseTensorIndexCSF::VT_INDICESBUFFERS,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn axisOrder(&self) -> flatbuffers::Vector<'a, i32> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i32>>>(
                    SparseTensorIndexCSF::VT_AXISORDER,
                    None,
                )
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for SparseTensorIndexCSF<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indptrType",
                Self::VT_INDPTRTYPE,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "indptrBuffers",
                Self::VT_INDPTRBUFFERS,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indicesType",
                Self::VT_INDICESTYPE,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "indicesBuffers",
                Self::VT_INDICESBUFFERS,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i32>>>(
                "axisOrder",
                Self::VT_AXISORDER,
                true,
            )?
            .finish();
        Ok(())
    }
}
pub struct SparseTensorIndexCSFArgs<'a> {
    pub indptrType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indptrBuffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub indicesType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indicesBuffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub axisOrder: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i32>>>,
}
impl<'a> Default for SparseTensorIndexCSFArgs<'a> {
    #[inline]
    fn default() -> Self {
        SparseTensorIndexCSFArgs {
            indptrType: None,
            indptrBuffers: None,
            indicesType: None,
            indicesBuffers: None,
            axisOrder: None,
        }
    }
}

pub struct SparseTensorIndexCSFBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SparseTensorIndexCSFBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_indptrType(&mut self, indptrType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseTensorIndexCSF::VT_INDPTRTYPE,
            indptrType,
        );
    }
    #[inline]
    pub fn add_indptrBuffers(
        &mut self,
        indptrBuffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCSF::VT_INDPTRBUFFERS,
            indptrBuffers,
        );
    }
    #[inline]
    pub fn add_indicesType(&mut self, indicesType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseTensorIndexCSF::VT_INDICESTYPE,
            indicesType,
        );
    }
    #[inline]
    pub fn add_indicesBuffers(
        &mut self,
        indicesBuffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCSF::VT_INDICESBUFFERS,
            indicesBuffers,
        );
    }
    #[inline]
    pub fn add_axisOrder(
        &mut self,
        axisOrder: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i32>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCSF::VT_AXISORDER,
            axisOrder,
        );
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> SparseTensorIndexCSFBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        SparseTensorIndexCSFBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensorIndexCSF<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDPTRTYPE, "indptrType");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDPTRBUFFERS, "indptrBuffers");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDICESTYPE, "indicesType");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDICESBUFFERS, "indicesBuffers");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_AXISORDER, "axisOrder");
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for SparseTensorIndexCSF<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("SparseTensorIndexCSF");
        ds.field("indptrType", &self.indptrType());
        ds.field("indptrBuffers", &self.indptrBuffers());
        ds.field("indicesType", &self.indicesType());
        ds.field("indicesBuffers", &self.indicesBuffers());
        ds.field("axisOrder", &self.axisOrder());
        ds.finish()
    }
}
pub enum SparseTensorOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct SparseTensor<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseTensor<'a> {
    type Inner = SparseTensor<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}

impl<'a> SparseTensor<'a> {
    pub const VT_TYPE_TYPE: flatbuffers::VOffsetT = 4;
    pub const VT_TYPE_: flatbuffers::VOffsetT = 6;
    pub const VT_SHAPE: flatbuffers::VOffsetT = 8;
    pub const VT_NON_ZERO_LENGTH: flatbuffers::VOffsetT = 10;
    pub const VT_SPARSEINDEX_TYPE: flatbuffers::VOffsetT = 12;
    pub const VT_SPARSEINDEX: flatbuffers::VOffsetT = 14;
    pub const VT_DATA: flatbuffers::VOffsetT = 16;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseTensor { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args SparseTensorArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseTensor<'bldr>> {
        let mut builder = SparseTensorBuilder::new(_fbb);
        builder.add_non_zero_length(args.non_zero_length);
        if let Some(x) = args.data {
            builder.add_data(x);
        }
        if let Some(x) = args.sparseIndex {
            builder.add_sparseIndex(x);
        }
        if let Some(x) = args.shape {
            builder.add_shape(x);
        }
        if let Some(x) = args.type_ {
            builder.add_type_(x);
        }
        builder.add_sparseIndex_type(args.sparseIndex_type);
        builder.add_type_type(args.type_type);
        builder.finish()
    }

    #[inline]
    pub fn type_type(&self) -> Type {
        unsafe {
            self._tab
                .get::<Type>(SparseTensor::VT_TYPE_TYPE, Some(Type::NONE))
                .unwrap()
        }
    }
    #[inline]
    pub fn type_(&self) -> flatbuffers::Table<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
                    SparseTensor::VT_TYPE_,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn shape(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim<'a>>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<
                    flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim>>,
                >>(SparseTensor::VT_SHAPE, None)
                .unwrap()
        }
    }
    #[inline]
    pub fn non_zero_length(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(SparseTensor::VT_NON_ZERO_LENGTH, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn sparseIndex_type(&self) -> SparseTensorIndex {
        unsafe {
            self._tab
                .get::<SparseTensorIndex>(
                    SparseTensor::VT_SPARSEINDEX_TYPE,
                    Some(SparseTensorIndex::NONE),
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn sparseIndex(&self) -> flatbuffers::Table<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
                    SparseTensor::VT_SPARSEINDEX,
                    None,
                )
                .unwrap()
        }
    }
    #[inline]
    pub fn data(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseTensor::VT_DATA, None)
                .unwrap()
        }
    }
    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_null(&self) -> Option<Null<'a>> {
        if self.type_type() == Type::Null {
            let u = self.type_();
            Some(unsafe { Null::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_int(&self) -> Option<Int<'a>> {
        if self.type_type() == Type::Int {
            let u = self.type_();
            Some(unsafe { Int::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_floating_point(&self) -> Option<FloatingPoint<'a>> {
        if self.type_type() == Type::FloatingPoint {
            let u = self.type_();
            Some(unsafe { FloatingPoint::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_binary(&self) -> Option<Binary<'a>> {
        if self.type_type() == Type::Binary {
            let u = self.type_();
            Some(unsafe { Binary::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_utf_8(&self) -> Option<Utf8<'a>> {
        if self.type_type() == Type::Utf8 {
            let u = self.type_();
            Some(unsafe { Utf8::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_bool(&self) -> Option<Bool<'a>> {
        if self.type_type() == Type::Bool {
            let u = self.type_();
            Some(unsafe { Bool::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_decimal(&self) -> Option<Decimal<'a>> {
        if self.type_type() == Type::Decimal {
            let u = self.type_();
            Some(unsafe { Decimal::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_date(&self) -> Option<Date<'a>> {
        if self.type_type() == Type::Date {
            let u = self.type_();
            Some(unsafe { Date::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_time(&self) -> Option<Time<'a>> {
        if self.type_type() == Type::Time {
            let u = self.type_();
            Some(unsafe { Time::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_timestamp(&self) -> Option<Timestamp<'a>> {
        if self.type_type() == Type::Timestamp {
            let u = self.type_();
            Some(unsafe { Timestamp::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_interval(&self) -> Option<Interval<'a>> {
        if self.type_type() == Type::Interval {
            let u = self.type_();
            Some(unsafe { Interval::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_list(&self) -> Option<List<'a>> {
        if self.type_type() == Type::List {
            let u = self.type_();
            Some(unsafe { List::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_struct_(&self) -> Option<Struct_<'a>> {
        if self.type_type() == Type::Struct_ {
            let u = self.type_();
            Some(unsafe { Struct_::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_union(&self) -> Option<Union<'a>> {
        if self.type_type() == Type::Union {
            let u = self.type_();
            Some(unsafe { Union::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_fixed_size_binary(&self) -> Option<FixedSizeBinary<'a>> {
        if self.type_type() == Type::FixedSizeBinary {
            let u = self.type_();
            Some(unsafe { FixedSizeBinary::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_fixed_size_list(&self) -> Option<FixedSizeList<'a>> {
        if self.type_type() == Type::FixedSizeList {
            let u = self.type_();
            Some(unsafe { FixedSizeList::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_map(&self) -> Option<Map<'a>> {
        if self.type_type() == Type::Map {
            let u = self.type_();
            Some(unsafe { Map::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_duration(&self) -> Option<Duration<'a>> {
        if self.type_type() == Type::Duration {
            let u = self.type_();
            Some(unsafe { Duration::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_large_binary(&self) -> Option<LargeBinary<'a>> {
        if self.type_type() == Type::LargeBinary {
            let u = self.type_();
            Some(unsafe { LargeBinary::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_large_utf_8(&self) -> Option<LargeUtf8<'a>> {
        if self.type_type() == Type::LargeUtf8 {
            let u = self.type_();
            Some(unsafe { LargeUtf8::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_large_list(&self) -> Option<LargeList<'a>> {
        if self.type_type() == Type::LargeList {
            let u = self.type_();
            Some(unsafe { LargeList::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_run_end_encoded(&self) -> Option<RunEndEncoded<'a>> {
        if self.type_type() == Type::RunEndEncoded {
            let u = self.type_();
            Some(unsafe { RunEndEncoded::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_binary_view(&self) -> Option<BinaryView<'a>> {
        if self.type_type() == Type::BinaryView {
            let u = self.type_();
            Some(unsafe { BinaryView::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_utf_8_view(&self) -> Option<Utf8View<'a>> {
        if self.type_type() == Type::Utf8View {
            let u = self.type_();
            Some(unsafe { Utf8View::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_list_view(&self) -> Option<ListView<'a>> {
        if self.type_type() == Type::ListView {
            let u = self.type_();
            Some(unsafe { ListView::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn type_as_large_list_view(&self) -> Option<LargeListView<'a>> {
        if self.type_type() == Type::LargeListView {
            let u = self.type_();
            Some(unsafe { LargeListView::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn sparseIndex_as_sparse_tensor_index_coo(&self) -> Option<SparseTensorIndexCOO<'a>> {
        if self.sparseIndex_type() == SparseTensorIndex::SparseTensorIndexCOO {
            let u = self.sparseIndex();
            Some(unsafe { SparseTensorIndexCOO::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn sparseIndex_as_sparse_matrix_index_csx(&self) -> Option<SparseMatrixIndexCSX<'a>> {
        if self.sparseIndex_type() == SparseTensorIndex::SparseMatrixIndexCSX {
            let u = self.sparseIndex();
            Some(unsafe { SparseMatrixIndexCSX::init_from_table(u) })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn sparseIndex_as_sparse_tensor_index_csf(&self) -> Option<SparseTensorIndexCSF<'a>> {
        if self.sparseIndex_type() == SparseTensorIndex::SparseTensorIndexCSF {
            let u = self.sparseIndex();
            Some(unsafe { SparseTensorIndexCSF::init_from_table(u) })
        } else {
            None
        }
    }
}

impl flatbuffers::Verifiable for SparseTensor<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_union::<Type, _>(
                "type_type",
                Self::VT_TYPE_TYPE,
                "type_",
                Self::VT_TYPE_,
                true,
                |key, v, pos| match key {
                    Type::Null => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Null>>(
                        "Type::Null",
                        pos,
                    ),
                    Type::Int => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Int>>(
                        "Type::Int",
                        pos,
                    ),
                    Type::FloatingPoint => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<FloatingPoint>>(
                            "Type::FloatingPoint",
                            pos,
                        ),
                    Type::Binary => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Binary>>(
                        "Type::Binary",
                        pos,
                    ),
                    Type::Utf8 => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Utf8>>(
                        "Type::Utf8",
                        pos,
                    ),
                    Type::Bool => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Bool>>(
                        "Type::Bool",
                        pos,
                    ),
                    Type::Decimal => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Decimal>>(
                            "Type::Decimal",
                            pos,
                        ),
                    Type::Date => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Date>>(
                        "Type::Date",
                        pos,
                    ),
                    Type::Time => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Time>>(
                        "Type::Time",
                        pos,
                    ),
                    Type::Timestamp => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Timestamp>>(
                            "Type::Timestamp",
                            pos,
                        ),
                    Type::Interval => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Interval>>(
                            "Type::Interval",
                            pos,
                        ),
                    Type::List => v.verify_union_variant::<flatbuffers::ForwardsUOffset<List>>(
                        "Type::List",
                        pos,
                    ),
                    Type::Struct_ => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Struct_>>(
                            "Type::Struct_",
                            pos,
                        ),
                    Type::Union => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Union>>(
                        "Type::Union",
                        pos,
                    ),
                    Type::FixedSizeBinary => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<FixedSizeBinary>>(
                            "Type::FixedSizeBinary",
                            pos,
                        ),
                    Type::FixedSizeList => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<FixedSizeList>>(
                            "Type::FixedSizeList",
                            pos,
                        ),
                    Type::Map => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Map>>(
                        "Type::Map",
                        pos,
                    ),
                    Type::Duration => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Duration>>(
                            "Type::Duration",
                            pos,
                        ),
                    Type::LargeBinary => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeBinary>>(
                            "Type::LargeBinary",
                            pos,
                        ),
                    Type::LargeUtf8 => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeUtf8>>(
                            "Type::LargeUtf8",
                            pos,
                        ),
                    Type::LargeList => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeList>>(
                            "Type::LargeList",
                            pos,
                        ),
                    Type::RunEndEncoded => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<RunEndEncoded>>(
                            "Type::RunEndEncoded",
                            pos,
                        ),
                    Type::BinaryView => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<BinaryView>>(
                            "Type::BinaryView",
                            pos,
                        ),
                    Type::Utf8View => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Utf8View>>(
                            "Type::Utf8View",
                            pos,
                        ),
                    Type::ListView => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<ListView>>(
                            "Type::ListView",
                            pos,
                        ),
                    Type::LargeListView => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeListView>>(
                            "Type::LargeListView",
                            pos,
                        ),
                    _ => Ok(()),
                },
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<TensorDim>>,
            >>("shape", Self::VT_SHAPE, true)?
            .visit_field::<i64>("non_zero_length", Self::VT_NON_ZERO_LENGTH, false)?
            .visit_union::<SparseTensorIndex, _>(
                "sparseIndex_type",
                Self::VT_SPARSEINDEX_TYPE,
                "sparseIndex",
                Self::VT_SPARSEINDEX,
                true,
                |key, v, pos| match key {
                    SparseTensorIndex::SparseTensorIndexCOO => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensorIndexCOO>>(
                            "SparseTensorIndex::SparseTensorIndexCOO",
                            pos,
                        ),
                    SparseTensorIndex::SparseMatrixIndexCSX => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseMatrixIndexCSX>>(
                            "SparseTensorIndex::SparseMatrixIndexCSX",
                            pos,
                        ),
                    SparseTensorIndex::SparseTensorIndexCSF => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensorIndexCSF>>(
                            "SparseTensorIndex::SparseTensorIndexCSF",
                            pos,
                        ),
                    _ => Ok(()),
                },
            )?
            .visit_field::<Buffer>("data", Self::VT_DATA, true)?
            .finish();
        Ok(())
    }
}
pub struct SparseTensorArgs<'a> {
    pub type_type: Type,
    pub type_: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    pub shape: Option<
        flatbuffers::WIPOffset<
            flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim<'a>>>,
        >,
    >,
    pub non_zero_length: i64,
    pub sparseIndex_type: SparseTensorIndex,
    pub sparseIndex: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    pub data: Option<&'a Buffer>,
}
impl<'a> Default for SparseTensorArgs<'a> {
    #[inline]
    fn default() -> Self {
        SparseTensorArgs {
            type_type: Type::NONE,
            type_: None,
            shape: None,
            non_zero_length: 0,
            sparseIndex_type: SparseTensorIndex::NONE,
            sparseIndex: None,
            data: None,
        }
    }
}

pub struct SparseTensorBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> SparseTensorBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_type_type(&mut self, type_type: Type) {
        self.fbb_
            .push_slot::<Type>(SparseTensor::VT_TYPE_TYPE, type_type, Type::NONE);
    }
    #[inline]
    pub fn add_type_(&mut self, type_: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(SparseTensor::VT_TYPE_, type_);
    }
    #[inline]
    pub fn add_shape(
        &mut self,
        shape: flatbuffers::WIPOffset<
            flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<TensorDim<'b>>>,
        >,
    ) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(SparseTensor::VT_SHAPE, shape);
    }
    #[inline]
    pub fn add_non_zero_length(&mut self, non_zero_length: i64) {
        self.fbb_
            .push_slot::<i64>(SparseTensor::VT_NON_ZERO_LENGTH, non_zero_length, 0);
    }
    #[inline]
    pub fn add_sparseIndex_type(&mut self, sparseIndex_type: SparseTensorIndex) {
        self.fbb_.push_slot::<SparseTensorIndex>(
            SparseTensor::VT_SPARSEINDEX_TYPE,
            sparseIndex_type,
            SparseTensorIndex::NONE,
        );
    }
    #[inline]
    pub fn add_sparseIndex(
        &mut self,
        sparseIndex: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensor::VT_SPARSEINDEX,
            sparseIndex,
        );
    }
    #[inline]
    pub fn add_data(&mut self, data: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseTensor::VT_DATA, data);
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> SparseTensorBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        SparseTensorBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensor<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_.required(o, SparseTensor::VT_TYPE_, "type_");
        self.fbb_.required(o, SparseTensor::VT_SHAPE, "shape");
        self.fbb_
            .required(o, SparseTensor::VT_SPARSEINDEX, "sparseIndex");
        self.fbb_.required(o, SparseTensor::VT_DATA, "data");
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for SparseTensor<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("SparseTensor");
        ds.field("type_type", &self.type_type());
        match self.type_type() {
            Type::Null => {
                if let Some(x) = self.type_as_null() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Int => {
                if let Some(x) = self.type_as_int() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::FloatingPoint => {
                if let Some(x) = self.type_as_floating_point() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Binary => {
                if let Some(x) = self.type_as_binary() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Utf8 => {
                if let Some(x) = self.type_as_utf_8() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Bool => {
                if let Some(x) = self.type_as_bool() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Decimal => {
                if let Some(x) = self.type_as_decimal() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Date => {
                if let Some(x) = self.type_as_date() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Time => {
                if let Some(x) = self.type_as_time() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Timestamp => {
                if let Some(x) = self.type_as_timestamp() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Interval => {
                if let Some(x) = self.type_as_interval() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::List => {
                if let Some(x) = self.type_as_list() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Struct_ => {
                if let Some(x) = self.type_as_struct_() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Union => {
                if let Some(x) = self.type_as_union() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::FixedSizeBinary => {
                if let Some(x) = self.type_as_fixed_size_binary() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::FixedSizeList => {
                if let Some(x) = self.type_as_fixed_size_list() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Map => {
                if let Some(x) = self.type_as_map() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Duration => {
                if let Some(x) = self.type_as_duration() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::LargeBinary => {
                if let Some(x) = self.type_as_large_binary() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::LargeUtf8 => {
                if let Some(x) = self.type_as_large_utf_8() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::LargeList => {
                if let Some(x) = self.type_as_large_list() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::RunEndEncoded => {
                if let Some(x) = self.type_as_run_end_encoded() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::BinaryView => {
                if let Some(x) = self.type_as_binary_view() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::Utf8View => {
                if let Some(x) = self.type_as_utf_8_view() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::ListView => {
                if let Some(x) = self.type_as_list_view() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            Type::LargeListView => {
                if let Some(x) = self.type_as_large_list_view() {
                    ds.field("type_", &x)
                } else {
                    ds.field(
                        "type_",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            _ => {
                let x: Option<()> = None;
                ds.field("type_", &x)
            }
        };
        ds.field("shape", &self.shape());
        ds.field("non_zero_length", &self.non_zero_length());
        ds.field("sparseIndex_type", &self.sparseIndex_type());
        match self.sparseIndex_type() {
            SparseTensorIndex::SparseTensorIndexCOO => {
                if let Some(x) = self.sparseIndex_as_sparse_tensor_index_coo() {
                    ds.field("sparseIndex", &x)
                } else {
                    ds.field(
                        "sparseIndex",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            SparseTensorIndex::SparseMatrixIndexCSX => {
                if let Some(x) = self.sparseIndex_as_sparse_matrix_index_csx() {
                    ds.field("sparseIndex", &x)
                } else {
                    ds.field(
                        "sparseIndex",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            SparseTensorIndex::SparseTensorIndexCSF => {
                if let Some(x) = self.sparseIndex_as_sparse_tensor_index_csf() {
                    ds.field("sparseIndex", &x)
                } else {
                    ds.field(
                        "sparseIndex",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            _ => {
                let x: Option<()> = None;
                ds.field("sparseIndex", &x)
            }
        };
        ds.field("data", &self.data());
        ds.finish()
    }
}
#[inline]
pub fn root_as_sparse_tensor(buf: &[u8]) -> Result<SparseTensor, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root::<SparseTensor>(buf)
}
#[inline]
pub fn size_prefixed_root_as_sparse_tensor(
    buf: &[u8],
) -> Result<SparseTensor, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root::<SparseTensor>(buf)
}
#[inline]
pub fn root_as_sparse_tensor_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<SparseTensor<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root_with_opts::<SparseTensor<'b>>(opts, buf)
}
#[inline]
pub fn size_prefixed_root_as_sparse_tensor_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<SparseTensor<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root_with_opts::<SparseTensor<'b>>(opts, buf)
}
#[inline]
pub unsafe fn root_as_sparse_tensor_unchecked(buf: &[u8]) -> SparseTensor {
    flatbuffers::root_unchecked::<SparseTensor>(buf)
}
#[inline]
pub unsafe fn size_prefixed_root_as_sparse_tensor_unchecked(buf: &[u8]) -> SparseTensor {
    flatbuffers::size_prefixed_root_unchecked::<SparseTensor>(buf)
}
#[inline]
pub fn finish_sparse_tensor_buffer<'a, 'b, A: flatbuffers::Allocator + 'a>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    root: flatbuffers::WIPOffset<SparseTensor<'a>>,
) {
    fbb.finish(root, None);
}

#[inline]
pub fn finish_size_prefixed_sparse_tensor_buffer<'a, 'b, A: flatbuffers::Allocator + 'a>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    root: flatbuffers::WIPOffset<SparseTensor<'a>>,
) {
    fbb.finish_size_prefixed(root, None);
}
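
// Illustrative sketch, not generated code: assembling a complete `SparseTensor`
// message with the builders above and reading it back through
// `root_as_sparse_tensor`. As in the earlier examples, it assumes the sibling
// generated `Schema`/`Tensor` modules provide `Int`/`IntArgs`, `Buffer`, and
// `TensorDim`/`TensorDimArgs` with the usual flatc-generated shapes; treat the
// exact field names as assumptions rather than a reference.
#[cfg(test)]
mod sparse_tensor_round_trip_example {
    use super::*;

    #[test]
    fn build_and_read_sparse_tensor() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();

        // Value type of the tensor (also reused as the COO index type here).
        let int64 = Int::create(
            &mut fbb,
            &IntArgs {
                bitWidth: 64,
                is_signed: true,
            },
        );

        // COO sparse index: `indicesType` and `indicesBuffer` are required.
        let coo = SparseTensorIndexCOO::create(
            &mut fbb,
            &SparseTensorIndexCOOArgs {
                indicesType: Some(int64),
                indicesStrides: None,
                indicesBuffer: Some(&Buffer::new(0, 64)),
                isCanonical: true,
            },
        );

        // Shape: a single dimension of size 4.
        let dim = TensorDim::create(
            &mut fbb,
            &TensorDimArgs {
                size: 4,
                name: None,
            },
        );
        let shape = fbb.create_vector(&[dim]);

        // Both unions (value type and sparse index) carry an explicit tag plus
        // an untyped table offset obtained via `as_union_value()`.
        let tensor = SparseTensor::create(
            &mut fbb,
            &SparseTensorArgs {
                type_type: Type::Int,
                type_: Some(int64.as_union_value()),
                shape: Some(shape),
                non_zero_length: 2,
                sparseIndex_type: SparseTensorIndex::SparseTensorIndexCOO,
                sparseIndex: Some(coo.as_union_value()),
                data: Some(&Buffer::new(64, 16)),
            },
        );
        finish_sparse_tensor_buffer(&mut fbb, tensor);

        let tensor = root_as_sparse_tensor(fbb.finished_data()).unwrap();
        assert_eq!(tensor.non_zero_length(), 2);
        assert_eq!(tensor.shape().len(), 1);
        assert!(tensor.type_as_int().is_some());
        assert!(tensor.sparseIndex_as_sparse_tensor_index_coo().is_some());
    }
}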