1#![allow(dead_code)]
19#![allow(unused_imports)]
20
21use crate::gen::Schema::*;
22use crate::gen::Tensor::*;
23use flatbuffers::EndianScalar;
24use std::{cmp::Ordering, mem};
// Deprecated module-level enum metadata kept only for backward compatibility;
// prefer the associated constants on `SparseMatrixCompressedAxis`.
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_SPARSE_MATRIX_COMPRESSED_AXIS: i16 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_SPARSE_MATRIX_COMPRESSED_AXIS: i16 = 1;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
// All known enum values, in schema declaration order.
pub const ENUM_VALUES_SPARSE_MATRIX_COMPRESSED_AXIS: [SparseMatrixCompressedAxis; 2] = [
    SparseMatrixCompressedAxis::Row,
    SparseMatrixCompressedAxis::Column,
];
46
/// FlatBuffers enum wrapper for the compressed axis of a sparse matrix
/// (`Row` = 0, `Column` = 1). Stored as a transparent `i16` so that unknown
/// future discriminants read from a buffer round-trip without data loss.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct SparseMatrixCompressedAxis(pub i16);
50#[allow(non_upper_case_globals)]
51impl SparseMatrixCompressedAxis {
52 pub const Row: Self = Self(0);
53 pub const Column: Self = Self(1);
54
55 pub const ENUM_MIN: i16 = 0;
56 pub const ENUM_MAX: i16 = 1;
57 pub const ENUM_VALUES: &'static [Self] = &[Self::Row, Self::Column];
58 pub fn variant_name(self) -> Option<&'static str> {
60 match self {
61 Self::Row => Some("Row"),
62 Self::Column => Some("Column"),
63 _ => None,
64 }
65 }
66}
67impl core::fmt::Debug for SparseMatrixCompressedAxis {
68 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
69 if let Some(name) = self.variant_name() {
70 f.write_str(name)
71 } else {
72 f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
73 }
74 }
75}
76impl<'a> flatbuffers::Follow<'a> for SparseMatrixCompressedAxis {
77 type Inner = Self;
78 #[inline]
79 unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
80 let b = flatbuffers::read_scalar_at::<i16>(buf, loc);
81 Self(b)
82 }
83}
84
impl flatbuffers::Push for SparseMatrixCompressedAxis {
    type Output = SparseMatrixCompressedAxis;
    /// Writes the wrapped `i16` into `dst`; `emplace_scalar` handles the
    /// little-endian wire conversion.
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<i16>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for SparseMatrixCompressedAxis {
    type Scalar = i16;
    /// Converts the wrapped value to FlatBuffers little-endian wire order.
    #[inline]
    fn to_little_endian(self) -> i16 {
        self.0.to_le()
    }
    /// Reconstructs the enum from a little-endian wire value.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i16) -> Self {
        let b = i16::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for SparseMatrixCompressedAxis {
    /// Delegates to the `i16` verifier: any bit pattern is structurally valid,
    /// so unknown enum values survive verification.
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i16::run_verifier(v, pos)
    }
}

// Marker: vectors of this fixed-size scalar can be verified by bounds alone.
impl flatbuffers::SimpleToVerifyInSlice for SparseMatrixCompressedAxis {}
// Deprecated module-level enum metadata kept only for backward compatibility;
// prefer the associated constants on `SparseTensorIndex`.
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_SPARSE_TENSOR_INDEX: u8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_SPARSE_TENSOR_INDEX: u8 = 3;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
// All known union-type discriminants, in schema declaration order.
pub const ENUM_VALUES_SPARSE_TENSOR_INDEX: [SparseTensorIndex; 4] = [
    SparseTensorIndex::NONE,
    SparseTensorIndex::SparseTensorIndexCOO,
    SparseTensorIndex::SparseMatrixIndexCSX,
    SparseTensorIndex::SparseTensorIndexCSF,
];
140
/// FlatBuffers union-type discriminant for the `sparseIndex` field of
/// `SparseTensor`. Stored as a transparent `u8` so unknown future values
/// round-trip without data loss.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct SparseTensorIndex(pub u8);
144#[allow(non_upper_case_globals)]
145impl SparseTensorIndex {
146 pub const NONE: Self = Self(0);
147 pub const SparseTensorIndexCOO: Self = Self(1);
148 pub const SparseMatrixIndexCSX: Self = Self(2);
149 pub const SparseTensorIndexCSF: Self = Self(3);
150
151 pub const ENUM_MIN: u8 = 0;
152 pub const ENUM_MAX: u8 = 3;
153 pub const ENUM_VALUES: &'static [Self] = &[
154 Self::NONE,
155 Self::SparseTensorIndexCOO,
156 Self::SparseMatrixIndexCSX,
157 Self::SparseTensorIndexCSF,
158 ];
159 pub fn variant_name(self) -> Option<&'static str> {
161 match self {
162 Self::NONE => Some("NONE"),
163 Self::SparseTensorIndexCOO => Some("SparseTensorIndexCOO"),
164 Self::SparseMatrixIndexCSX => Some("SparseMatrixIndexCSX"),
165 Self::SparseTensorIndexCSF => Some("SparseTensorIndexCSF"),
166 _ => None,
167 }
168 }
169}
170impl core::fmt::Debug for SparseTensorIndex {
171 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
172 if let Some(name) = self.variant_name() {
173 f.write_str(name)
174 } else {
175 f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
176 }
177 }
178}
179impl<'a> flatbuffers::Follow<'a> for SparseTensorIndex {
180 type Inner = Self;
181 #[inline]
182 unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
183 let b = flatbuffers::read_scalar_at::<u8>(buf, loc);
184 Self(b)
185 }
186}
187
impl flatbuffers::Push for SparseTensorIndex {
    type Output = SparseTensorIndex;
    /// Writes the wrapped `u8` into `dst`; `emplace_scalar` handles the
    /// little-endian wire conversion.
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        flatbuffers::emplace_scalar::<u8>(dst, self.0);
    }
}

impl flatbuffers::EndianScalar for SparseTensorIndex {
    type Scalar = u8;
    /// Converts the wrapped value to FlatBuffers little-endian wire order
    /// (a no-op for a single byte, kept for trait uniformity).
    #[inline]
    fn to_little_endian(self) -> u8 {
        self.0.to_le()
    }
    /// Reconstructs the enum from a little-endian wire value.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: u8) -> Self {
        let b = u8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for SparseTensorIndex {
    /// Delegates to the `u8` verifier: any bit pattern is structurally valid,
    /// so unknown union discriminants survive verification.
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        u8::run_verifier(v, pos)
    }
}

// Marker: vectors of this fixed-size scalar can be verified by bounds alone.
impl flatbuffers::SimpleToVerifyInSlice for SparseTensorIndex {}
/// Marker type for union table offsets of the `SparseTensorIndex` union.
pub struct SparseTensorIndexUnionTableOffset {}

/// Uninhabited phantom type used to tag `WIPOffset`s of COO index tables.
pub enum SparseTensorIndexCOOOffset {}
#[derive(Copy, Clone, PartialEq)]

/// Zero-copy view over a `SparseTensorIndexCOO` FlatBuffers table
/// (coordinate-format sparse-tensor index).
pub struct SparseTensorIndexCOO<'a> {
    pub _tab: flatbuffers::Table<'a>,
}
262
impl<'a> flatbuffers::Follow<'a> for SparseTensorIndexCOO<'a> {
    type Inner = SparseTensorIndexCOO<'a>;
    /// Resolves a table reference at `loc`.
    ///
    /// # Safety
    /// `buf` and `loc` must denote a valid, verified table of this type.
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
272
impl<'a> SparseTensorIndexCOO<'a> {
    // Vtable slot offsets for each field, as generated by flatc.
    pub const VT_INDICESTYPE: flatbuffers::VOffsetT = 4;
    pub const VT_INDICESSTRIDES: flatbuffers::VOffsetT = 6;
    pub const VT_INDICESBUFFER: flatbuffers::VOffsetT = 8;
    pub const VT_ISCANONICAL: flatbuffers::VOffsetT = 10;

    /// Wraps an already-verified table.
    ///
    /// # Safety
    /// `table` must point at a valid `SparseTensorIndexCOO` table.
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseTensorIndexCOO { _tab: table }
    }
    /// Serializes a new `SparseTensorIndexCOO` table from `args` into `_fbb`.
    // Fields are pushed in the fixed order flatc emits (offsets first, then
    // inline scalars) so vtable layout matches other producers.
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args SparseTensorIndexCOOArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseTensorIndexCOO<'bldr>> {
        let mut builder = SparseTensorIndexCOOBuilder::new(_fbb);
        if let Some(x) = args.indicesBuffer {
            builder.add_indicesBuffer(x);
        }
        if let Some(x) = args.indicesStrides {
            builder.add_indicesStrides(x);
        }
        if let Some(x) = args.indicesType {
            builder.add_indicesType(x);
        }
        builder.add_isCanonical(args.isCanonical);
        builder.finish()
    }

    /// Integer type of the coordinate indices. Required field — the `unwrap`
    /// is safe for buffers built via `finish`/verified via `run_verifier`.
    #[inline]
    pub fn indicesType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(
                    SparseTensorIndexCOO::VT_INDICESTYPE,
                    None,
                )
                .unwrap()
        }
    }
    /// Optional per-dimension strides of the indices matrix; `None` when the
    /// writer omitted the field.
    #[inline]
    pub fn indicesStrides(&self) -> Option<flatbuffers::Vector<'a, i64>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i64>>>(
                    SparseTensorIndexCOO::VT_INDICESSTRIDES,
                    None,
                )
        }
    }
    /// Buffer holding the coordinate indices. Required field (see
    /// `indicesType` for the safety argument).
    #[inline]
    pub fn indicesBuffer(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseTensorIndexCOO::VT_INDICESBUFFER, None)
                .unwrap()
        }
    }
    /// Whether the indices are sorted/canonical; defaults to `false` when the
    /// field is absent, so the `unwrap` cannot fail.
    #[inline]
    pub fn isCanonical(&self) -> bool {
        unsafe {
            self._tab
                .get::<bool>(SparseTensorIndexCOO::VT_ISCANONICAL, Some(false))
                .unwrap()
        }
    }
}
361
impl flatbuffers::Verifiable for SparseTensorIndexCOO<'_> {
    /// Structurally verifies the table. The boolean flag on each
    /// `visit_field` marks it required: `indicesType` and `indicesBuffer`
    /// must be present; `indicesStrides` and `isCanonical` are optional.
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indicesType",
                Self::VT_INDICESTYPE,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i64>>>(
                "indicesStrides",
                Self::VT_INDICESSTRIDES,
                false,
            )?
            .visit_field::<Buffer>("indicesBuffer", Self::VT_INDICESBUFFER, true)?
            .visit_field::<bool>("isCanonical", Self::VT_ISCANONICAL, false)?
            .finish();
        Ok(())
    }
}
/// Argument bundle for [`SparseTensorIndexCOO::create`]. `indicesType` and
/// `indicesBuffer` are required at `finish` time; the rest are optional.
pub struct SparseTensorIndexCOOArgs<'a> {
    pub indicesType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indicesStrides: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i64>>>,
    pub indicesBuffer: Option<&'a Buffer>,
    pub isCanonical: bool,
}
392impl<'a> Default for SparseTensorIndexCOOArgs<'a> {
393 #[inline]
394 fn default() -> Self {
395 SparseTensorIndexCOOArgs {
396 indicesType: None, indicesStrides: None,
398 indicesBuffer: None, isCanonical: false,
400 }
401 }
402}
403
/// Incremental builder for `SparseTensorIndexCOO` tables.
pub struct SparseTensorIndexCOOBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SparseTensorIndexCOOBuilder<'a, 'b> {
    /// Stores the (required) indices-type offset into its vtable slot.
    #[inline]
    pub fn add_indicesType(&mut self, indicesType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseTensorIndexCOO::VT_INDICESTYPE,
            indicesType,
        );
    }
    /// Stores the optional strides-vector offset into its vtable slot.
    #[inline]
    pub fn add_indicesStrides(
        &mut self,
        indicesStrides: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i64>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCOO::VT_INDICESSTRIDES,
            indicesStrides,
        );
    }
    /// Stores the (required) indices buffer struct inline in the table.
    #[inline]
    pub fn add_indicesBuffer(&mut self, indicesBuffer: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseTensorIndexCOO::VT_INDICESBUFFER, indicesBuffer);
    }
    /// Stores `isCanonical`; omitted from the wire when equal to the
    /// default `false` (push_slot elides default values).
    #[inline]
    pub fn add_isCanonical(&mut self, isCanonical: bool) {
        self.fbb_
            .push_slot::<bool>(SparseTensorIndexCOO::VT_ISCANONICAL, isCanonical, false);
    }
    /// Starts a new table in `_fbb` and returns a builder positioned at it.
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    ) -> SparseTensorIndexCOOBuilder<'a, 'b> {
        let start = _fbb.start_table();
        SparseTensorIndexCOOBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and enforces required fields; panics (via `required`)
    /// if `indicesType` or `indicesBuffer` was never added.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensorIndexCOO<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_
            .required(o, SparseTensorIndexCOO::VT_INDICESTYPE, "indicesType");
        self.fbb_
            .required(o, SparseTensorIndexCOO::VT_INDICESBUFFER, "indicesBuffer");
        flatbuffers::WIPOffset::new(o.value())
    }
}
456
457impl core::fmt::Debug for SparseTensorIndexCOO<'_> {
458 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
459 let mut ds = f.debug_struct("SparseTensorIndexCOO");
460 ds.field("indicesType", &self.indicesType());
461 ds.field("indicesStrides", &self.indicesStrides());
462 ds.field("indicesBuffer", &self.indicesBuffer());
463 ds.field("isCanonical", &self.isCanonical());
464 ds.finish()
465 }
466}
/// Uninhabited phantom type used to tag `WIPOffset`s of CSX index tables.
pub enum SparseMatrixIndexCSXOffset {}
#[derive(Copy, Clone, PartialEq)]

/// Zero-copy view over a `SparseMatrixIndexCSX` FlatBuffers table
/// (compressed-sparse row/column matrix index).
pub struct SparseMatrixIndexCSX<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseMatrixIndexCSX<'a> {
    type Inner = SparseMatrixIndexCSX<'a>;
    /// Resolves a table reference at `loc`.
    ///
    /// # Safety
    /// `buf` and `loc` must denote a valid, verified table of this type.
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
484
impl<'a> SparseMatrixIndexCSX<'a> {
    // Vtable slot offsets for each field, as generated by flatc.
    pub const VT_COMPRESSEDAXIS: flatbuffers::VOffsetT = 4;
    pub const VT_INDPTRTYPE: flatbuffers::VOffsetT = 6;
    pub const VT_INDPTRBUFFER: flatbuffers::VOffsetT = 8;
    pub const VT_INDICESTYPE: flatbuffers::VOffsetT = 10;
    pub const VT_INDICESBUFFER: flatbuffers::VOffsetT = 12;

    /// Wraps an already-verified table.
    ///
    /// # Safety
    /// `table` must point at a valid `SparseMatrixIndexCSX` table.
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseMatrixIndexCSX { _tab: table }
    }
    /// Serializes a new `SparseMatrixIndexCSX` table from `args` into `_fbb`.
    // Fields are pushed in the fixed order flatc emits (offsets first, then
    // the inline enum scalar) so vtable layout matches other producers.
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args SparseMatrixIndexCSXArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseMatrixIndexCSX<'bldr>> {
        let mut builder = SparseMatrixIndexCSXBuilder::new(_fbb);
        if let Some(x) = args.indicesBuffer {
            builder.add_indicesBuffer(x);
        }
        if let Some(x) = args.indicesType {
            builder.add_indicesType(x);
        }
        if let Some(x) = args.indptrBuffer {
            builder.add_indptrBuffer(x);
        }
        if let Some(x) = args.indptrType {
            builder.add_indptrType(x);
        }
        builder.add_compressedAxis(args.compressedAxis);
        builder.finish()
    }

    /// Which axis is compressed (CSR vs. CSC); defaults to `Row` when the
    /// field is absent, so the `unwrap` cannot fail.
    #[inline]
    pub fn compressedAxis(&self) -> SparseMatrixCompressedAxis {
        unsafe {
            self._tab
                .get::<SparseMatrixCompressedAxis>(
                    SparseMatrixIndexCSX::VT_COMPRESSEDAXIS,
                    Some(SparseMatrixCompressedAxis::Row),
                )
                .unwrap()
        }
    }
    /// Integer type of the indptr array. Required field — `unwrap` is safe
    /// for buffers built via `finish`/verified via `run_verifier`.
    #[inline]
    pub fn indptrType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(SparseMatrixIndexCSX::VT_INDPTRTYPE, None)
                .unwrap()
        }
    }
    /// Buffer holding the indptr array. Required field (see `indptrType`).
    #[inline]
    pub fn indptrBuffer(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseMatrixIndexCSX::VT_INDPTRBUFFER, None)
                .unwrap()
        }
    }
    /// Integer type of the indices array. Required field.
    #[inline]
    pub fn indicesType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(
                    SparseMatrixIndexCSX::VT_INDICESTYPE,
                    None,
                )
                .unwrap()
        }
    }
    /// Buffer holding the indices array. Required field.
    #[inline]
    pub fn indicesBuffer(&self) -> &'a Buffer {
        unsafe {
            self._tab
                .get::<Buffer>(SparseMatrixIndexCSX::VT_INDICESBUFFER, None)
                .unwrap()
        }
    }
}
615
impl flatbuffers::Verifiable for SparseMatrixIndexCSX<'_> {
    /// Structurally verifies the table. The boolean flag on each
    /// `visit_field` marks it required: all four type/buffer fields must be
    /// present; `compressedAxis` is optional (defaults to `Row`).
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<SparseMatrixCompressedAxis>(
                "compressedAxis",
                Self::VT_COMPRESSEDAXIS,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indptrType",
                Self::VT_INDPTRTYPE,
                true,
            )?
            .visit_field::<Buffer>("indptrBuffer", Self::VT_INDPTRBUFFER, true)?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indicesType",
                Self::VT_INDICESTYPE,
                true,
            )?
            .visit_field::<Buffer>("indicesBuffer", Self::VT_INDICESBUFFER, true)?
            .finish();
        Ok(())
    }
}
/// Argument bundle for [`SparseMatrixIndexCSX::create`]. All four
/// type/buffer fields are required at `finish` time.
pub struct SparseMatrixIndexCSXArgs<'a> {
    pub compressedAxis: SparseMatrixCompressedAxis,
    pub indptrType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indptrBuffer: Option<&'a Buffer>,
    pub indicesType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indicesBuffer: Option<&'a Buffer>,
}
652impl<'a> Default for SparseMatrixIndexCSXArgs<'a> {
653 #[inline]
654 fn default() -> Self {
655 SparseMatrixIndexCSXArgs {
656 compressedAxis: SparseMatrixCompressedAxis::Row,
657 indptrType: None, indptrBuffer: None, indicesType: None, indicesBuffer: None, }
662 }
663}
664
/// Incremental builder for `SparseMatrixIndexCSX` tables.
pub struct SparseMatrixIndexCSXBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SparseMatrixIndexCSXBuilder<'a, 'b> {
    /// Stores the compressed-axis enum; omitted from the wire when equal to
    /// the default `Row` (push_slot elides default values).
    #[inline]
    pub fn add_compressedAxis(&mut self, compressedAxis: SparseMatrixCompressedAxis) {
        self.fbb_.push_slot::<SparseMatrixCompressedAxis>(
            SparseMatrixIndexCSX::VT_COMPRESSEDAXIS,
            compressedAxis,
            SparseMatrixCompressedAxis::Row,
        );
    }
    /// Stores the (required) indptr-type offset into its vtable slot.
    #[inline]
    pub fn add_indptrType(&mut self, indptrType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseMatrixIndexCSX::VT_INDPTRTYPE,
            indptrType,
        );
    }
    /// Stores the (required) indptr buffer struct inline in the table.
    #[inline]
    pub fn add_indptrBuffer(&mut self, indptrBuffer: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseMatrixIndexCSX::VT_INDPTRBUFFER, indptrBuffer);
    }
    /// Stores the (required) indices-type offset into its vtable slot.
    #[inline]
    pub fn add_indicesType(&mut self, indicesType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseMatrixIndexCSX::VT_INDICESTYPE,
            indicesType,
        );
    }
    /// Stores the (required) indices buffer struct inline in the table.
    #[inline]
    pub fn add_indicesBuffer(&mut self, indicesBuffer: &Buffer) {
        self.fbb_
            .push_slot_always::<&Buffer>(SparseMatrixIndexCSX::VT_INDICESBUFFER, indicesBuffer);
    }
    /// Starts a new table in `_fbb` and returns a builder positioned at it.
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    ) -> SparseMatrixIndexCSXBuilder<'a, 'b> {
        let start = _fbb.start_table();
        SparseMatrixIndexCSXBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and enforces required fields; panics (via `required`)
    /// if any of the four type/buffer fields was never added.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseMatrixIndexCSX<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDPTRTYPE, "indptrType");
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDPTRBUFFER, "indptrBuffer");
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDICESTYPE, "indicesType");
        self.fbb_
            .required(o, SparseMatrixIndexCSX::VT_INDICESBUFFER, "indicesBuffer");
        flatbuffers::WIPOffset::new(o.value())
    }
}
726
727impl core::fmt::Debug for SparseMatrixIndexCSX<'_> {
728 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
729 let mut ds = f.debug_struct("SparseMatrixIndexCSX");
730 ds.field("compressedAxis", &self.compressedAxis());
731 ds.field("indptrType", &self.indptrType());
732 ds.field("indptrBuffer", &self.indptrBuffer());
733 ds.field("indicesType", &self.indicesType());
734 ds.field("indicesBuffer", &self.indicesBuffer());
735 ds.finish()
736 }
737}
/// Uninhabited phantom type used to tag `WIPOffset`s of CSF index tables.
pub enum SparseTensorIndexCSFOffset {}
#[derive(Copy, Clone, PartialEq)]

/// Zero-copy view over a `SparseTensorIndexCSF` FlatBuffers table
/// (compressed-sparse-fiber sparse-tensor index).
pub struct SparseTensorIndexCSF<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseTensorIndexCSF<'a> {
    type Inner = SparseTensorIndexCSF<'a>;
    /// Resolves a table reference at `loc`.
    ///
    /// # Safety
    /// `buf` and `loc` must denote a valid, verified table of this type.
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
755
impl<'a> SparseTensorIndexCSF<'a> {
    // Vtable slot offsets for each field, as generated by flatc.
    pub const VT_INDPTRTYPE: flatbuffers::VOffsetT = 4;
    pub const VT_INDPTRBUFFERS: flatbuffers::VOffsetT = 6;
    pub const VT_INDICESTYPE: flatbuffers::VOffsetT = 8;
    pub const VT_INDICESBUFFERS: flatbuffers::VOffsetT = 10;
    pub const VT_AXISORDER: flatbuffers::VOffsetT = 12;

    /// Wraps an already-verified table.
    ///
    /// # Safety
    /// `table` must point at a valid `SparseTensorIndexCSF` table.
    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        SparseTensorIndexCSF { _tab: table }
    }
    /// Serializes a new `SparseTensorIndexCSF` table from `args` into `_fbb`.
    // Fields are pushed in the fixed order flatc emits so the vtable layout
    // matches other producers.
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
        args: &'args SparseTensorIndexCSFArgs<'args>,
    ) -> flatbuffers::WIPOffset<SparseTensorIndexCSF<'bldr>> {
        let mut builder = SparseTensorIndexCSFBuilder::new(_fbb);
        if let Some(x) = args.axisOrder {
            builder.add_axisOrder(x);
        }
        if let Some(x) = args.indicesBuffers {
            builder.add_indicesBuffers(x);
        }
        if let Some(x) = args.indicesType {
            builder.add_indicesType(x);
        }
        if let Some(x) = args.indptrBuffers {
            builder.add_indptrBuffers(x);
        }
        if let Some(x) = args.indptrType {
            builder.add_indptrType(x);
        }
        builder.finish()
    }

    /// Integer type of the indptr arrays. Required field — `unwrap` is safe
    /// for buffers built via `finish`/verified via `run_verifier`.
    #[inline]
    pub fn indptrType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(SparseTensorIndexCSF::VT_INDPTRTYPE, None)
                .unwrap()
        }
    }
    /// Per-dimension indptr buffers. Required field.
    #[inline]
    pub fn indptrBuffers(&self) -> flatbuffers::Vector<'a, Buffer> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    SparseTensorIndexCSF::VT_INDPTRBUFFERS,
                    None,
                )
                .unwrap()
        }
    }
    /// Integer type of the indices arrays. Required field.
    #[inline]
    pub fn indicesType(&self) -> Int<'a> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<Int>>(
                    SparseTensorIndexCSF::VT_INDICESTYPE,
                    None,
                )
                .unwrap()
        }
    }
    /// Per-dimension indices buffers. Required field.
    #[inline]
    pub fn indicesBuffers(&self) -> flatbuffers::Vector<'a, Buffer> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    SparseTensorIndexCSF::VT_INDICESBUFFERS,
                    None,
                )
                .unwrap()
        }
    }
    /// Logical-to-stored axis ordering. Required field.
    #[inline]
    pub fn axisOrder(&self) -> flatbuffers::Vector<'a, i32> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i32>>>(
                    SparseTensorIndexCSF::VT_AXISORDER,
                    None,
                )
                .unwrap()
        }
    }
}
922
impl flatbuffers::Verifiable for SparseTensorIndexCSF<'_> {
    /// Structurally verifies the table. Every field is required (`true`
    /// flag), matching the accessors' unconditional `unwrap`s.
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indptrType",
                Self::VT_INDPTRTYPE,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "indptrBuffers",
                Self::VT_INDPTRBUFFERS,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<Int>>(
                "indicesType",
                Self::VT_INDICESTYPE,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "indicesBuffers",
                Self::VT_INDICESBUFFERS,
                true,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i32>>>(
                "axisOrder",
                Self::VT_AXISORDER,
                true,
            )?
            .finish();
        Ok(())
    }
}
/// Argument bundle for [`SparseTensorIndexCSF::create`]. Every field is
/// required at `finish` time despite the `Option` wrappers.
pub struct SparseTensorIndexCSFArgs<'a> {
    pub indptrType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indptrBuffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub indicesType: Option<flatbuffers::WIPOffset<Int<'a>>>,
    pub indicesBuffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub axisOrder: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i32>>>,
}
967impl<'a> Default for SparseTensorIndexCSFArgs<'a> {
968 #[inline]
969 fn default() -> Self {
970 SparseTensorIndexCSFArgs {
971 indptrType: None, indptrBuffers: None, indicesType: None, indicesBuffers: None, axisOrder: None, }
977 }
978}
979
/// Incremental builder for `SparseTensorIndexCSF` tables.
pub struct SparseTensorIndexCSFBuilder<'a: 'b, 'b> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SparseTensorIndexCSFBuilder<'a, 'b> {
    /// Stores the (required) indptr-type offset into its vtable slot.
    #[inline]
    pub fn add_indptrType(&mut self, indptrType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseTensorIndexCSF::VT_INDPTRTYPE,
            indptrType,
        );
    }
    /// Stores the (required) indptr-buffers vector offset into its slot.
    #[inline]
    pub fn add_indptrBuffers(
        &mut self,
        indptrBuffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCSF::VT_INDPTRBUFFERS,
            indptrBuffers,
        );
    }
    /// Stores the (required) indices-type offset into its vtable slot.
    #[inline]
    pub fn add_indicesType(&mut self, indicesType: flatbuffers::WIPOffset<Int<'b>>) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<Int>>(
            SparseTensorIndexCSF::VT_INDICESTYPE,
            indicesType,
        );
    }
    /// Stores the (required) indices-buffers vector offset into its slot.
    #[inline]
    pub fn add_indicesBuffers(
        &mut self,
        indicesBuffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCSF::VT_INDICESBUFFERS,
            indicesBuffers,
        );
    }
    /// Stores the (required) axis-order vector offset into its slot.
    #[inline]
    pub fn add_axisOrder(
        &mut self,
        axisOrder: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i32>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            SparseTensorIndexCSF::VT_AXISORDER,
            axisOrder,
        );
    }
    /// Starts a new table in `_fbb` and returns a builder positioned at it.
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    ) -> SparseTensorIndexCSFBuilder<'a, 'b> {
        let start = _fbb.start_table();
        SparseTensorIndexCSFBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    /// Ends the table and enforces required fields; panics (via `required`)
    /// if any field was never added.
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensorIndexCSF<'a>> {
        let o = self.fbb_.end_table(self.start_);
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDPTRTYPE, "indptrType");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDPTRBUFFERS, "indptrBuffers");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDICESTYPE, "indicesType");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_INDICESBUFFERS, "indicesBuffers");
        self.fbb_
            .required(o, SparseTensorIndexCSF::VT_AXISORDER, "axisOrder");
        flatbuffers::WIPOffset::new(o.value())
    }
}
1055
1056impl core::fmt::Debug for SparseTensorIndexCSF<'_> {
1057 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1058 let mut ds = f.debug_struct("SparseTensorIndexCSF");
1059 ds.field("indptrType", &self.indptrType());
1060 ds.field("indptrBuffers", &self.indptrBuffers());
1061 ds.field("indicesType", &self.indicesType());
1062 ds.field("indicesBuffers", &self.indicesBuffers());
1063 ds.field("axisOrder", &self.axisOrder());
1064 ds.finish()
1065 }
1066}
/// Uninhabited phantom type used to tag `WIPOffset`s of sparse-tensor tables.
pub enum SparseTensorOffset {}
#[derive(Copy, Clone, PartialEq)]

/// Zero-copy view over a `SparseTensor` FlatBuffers table.
pub struct SparseTensor<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for SparseTensor<'a> {
    type Inner = SparseTensor<'a>;
    /// Resolves a table reference at `loc`.
    ///
    /// # Safety
    /// `buf` and `loc` must denote a valid, verified table of this type.
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: flatbuffers::Table::new(buf, loc),
        }
    }
}
1083
1084impl<'a> SparseTensor<'a> {
1085 pub const VT_TYPE_TYPE: flatbuffers::VOffsetT = 4;
1086 pub const VT_TYPE_: flatbuffers::VOffsetT = 6;
1087 pub const VT_SHAPE: flatbuffers::VOffsetT = 8;
1088 pub const VT_NON_ZERO_LENGTH: flatbuffers::VOffsetT = 10;
1089 pub const VT_SPARSEINDEX_TYPE: flatbuffers::VOffsetT = 12;
1090 pub const VT_SPARSEINDEX: flatbuffers::VOffsetT = 14;
1091 pub const VT_DATA: flatbuffers::VOffsetT = 16;
1092
1093 #[inline]
1094 pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
1095 SparseTensor { _tab: table }
1096 }
1097 #[allow(unused_mut)]
1098 pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
1099 _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
1100 args: &'args SparseTensorArgs<'args>,
1101 ) -> flatbuffers::WIPOffset<SparseTensor<'bldr>> {
1102 let mut builder = SparseTensorBuilder::new(_fbb);
1103 builder.add_non_zero_length(args.non_zero_length);
1104 if let Some(x) = args.data {
1105 builder.add_data(x);
1106 }
1107 if let Some(x) = args.sparseIndex {
1108 builder.add_sparseIndex(x);
1109 }
1110 if let Some(x) = args.shape {
1111 builder.add_shape(x);
1112 }
1113 if let Some(x) = args.type_ {
1114 builder.add_type_(x);
1115 }
1116 builder.add_sparseIndex_type(args.sparseIndex_type);
1117 builder.add_type_type(args.type_type);
1118 builder.finish()
1119 }
1120
1121 #[inline]
1122 pub fn type_type(&self) -> Type {
1123 unsafe {
1127 self._tab
1128 .get::<Type>(SparseTensor::VT_TYPE_TYPE, Some(Type::NONE))
1129 .unwrap()
1130 }
1131 }
1132 #[inline]
1136 pub fn type_(&self) -> flatbuffers::Table<'a> {
1137 unsafe {
1141 self._tab
1142 .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
1143 SparseTensor::VT_TYPE_,
1144 None,
1145 )
1146 .unwrap()
1147 }
1148 }
1149 #[inline]
1151 pub fn shape(&self) -> flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim<'a>>> {
1152 unsafe {
1156 self._tab
1157 .get::<flatbuffers::ForwardsUOffset<
1158 flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim>>,
1159 >>(SparseTensor::VT_SHAPE, None)
1160 .unwrap()
1161 }
1162 }
1163 #[inline]
1165 pub fn non_zero_length(&self) -> i64 {
1166 unsafe {
1170 self._tab
1171 .get::<i64>(SparseTensor::VT_NON_ZERO_LENGTH, Some(0))
1172 .unwrap()
1173 }
1174 }
1175 #[inline]
1176 pub fn sparseIndex_type(&self) -> SparseTensorIndex {
1177 unsafe {
1181 self._tab
1182 .get::<SparseTensorIndex>(
1183 SparseTensor::VT_SPARSEINDEX_TYPE,
1184 Some(SparseTensorIndex::NONE),
1185 )
1186 .unwrap()
1187 }
1188 }
1189 #[inline]
1191 pub fn sparseIndex(&self) -> flatbuffers::Table<'a> {
1192 unsafe {
1196 self._tab
1197 .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
1198 SparseTensor::VT_SPARSEINDEX,
1199 None,
1200 )
1201 .unwrap()
1202 }
1203 }
1204 #[inline]
1206 pub fn data(&self) -> &'a Buffer {
1207 unsafe {
1211 self._tab
1212 .get::<Buffer>(SparseTensor::VT_DATA, None)
1213 .unwrap()
1214 }
1215 }
1216 #[inline]
1217 #[allow(non_snake_case)]
1218 pub fn type_as_null(&self) -> Option<Null<'a>> {
1219 if self.type_type() == Type::Null {
1220 let u = self.type_();
1221 Some(unsafe { Null::init_from_table(u) })
1225 } else {
1226 None
1227 }
1228 }
1229
1230 #[inline]
1231 #[allow(non_snake_case)]
1232 pub fn type_as_int(&self) -> Option<Int<'a>> {
1233 if self.type_type() == Type::Int {
1234 let u = self.type_();
1235 Some(unsafe { Int::init_from_table(u) })
1239 } else {
1240 None
1241 }
1242 }
1243
1244 #[inline]
1245 #[allow(non_snake_case)]
1246 pub fn type_as_floating_point(&self) -> Option<FloatingPoint<'a>> {
1247 if self.type_type() == Type::FloatingPoint {
1248 let u = self.type_();
1249 Some(unsafe { FloatingPoint::init_from_table(u) })
1253 } else {
1254 None
1255 }
1256 }
1257
1258 #[inline]
1259 #[allow(non_snake_case)]
1260 pub fn type_as_binary(&self) -> Option<Binary<'a>> {
1261 if self.type_type() == Type::Binary {
1262 let u = self.type_();
1263 Some(unsafe { Binary::init_from_table(u) })
1267 } else {
1268 None
1269 }
1270 }
1271
1272 #[inline]
1273 #[allow(non_snake_case)]
1274 pub fn type_as_utf_8(&self) -> Option<Utf8<'a>> {
1275 if self.type_type() == Type::Utf8 {
1276 let u = self.type_();
1277 Some(unsafe { Utf8::init_from_table(u) })
1281 } else {
1282 None
1283 }
1284 }
1285
1286 #[inline]
1287 #[allow(non_snake_case)]
1288 pub fn type_as_bool(&self) -> Option<Bool<'a>> {
1289 if self.type_type() == Type::Bool {
1290 let u = self.type_();
1291 Some(unsafe { Bool::init_from_table(u) })
1295 } else {
1296 None
1297 }
1298 }
1299
1300 #[inline]
1301 #[allow(non_snake_case)]
1302 pub fn type_as_decimal(&self) -> Option<Decimal<'a>> {
1303 if self.type_type() == Type::Decimal {
1304 let u = self.type_();
1305 Some(unsafe { Decimal::init_from_table(u) })
1309 } else {
1310 None
1311 }
1312 }
1313
1314 #[inline]
1315 #[allow(non_snake_case)]
1316 pub fn type_as_date(&self) -> Option<Date<'a>> {
1317 if self.type_type() == Type::Date {
1318 let u = self.type_();
1319 Some(unsafe { Date::init_from_table(u) })
1323 } else {
1324 None
1325 }
1326 }
1327
1328 #[inline]
1329 #[allow(non_snake_case)]
1330 pub fn type_as_time(&self) -> Option<Time<'a>> {
1331 if self.type_type() == Type::Time {
1332 let u = self.type_();
1333 Some(unsafe { Time::init_from_table(u) })
1337 } else {
1338 None
1339 }
1340 }
1341
1342 #[inline]
1343 #[allow(non_snake_case)]
1344 pub fn type_as_timestamp(&self) -> Option<Timestamp<'a>> {
1345 if self.type_type() == Type::Timestamp {
1346 let u = self.type_();
1347 Some(unsafe { Timestamp::init_from_table(u) })
1351 } else {
1352 None
1353 }
1354 }
1355
1356 #[inline]
1357 #[allow(non_snake_case)]
1358 pub fn type_as_interval(&self) -> Option<Interval<'a>> {
1359 if self.type_type() == Type::Interval {
1360 let u = self.type_();
1361 Some(unsafe { Interval::init_from_table(u) })
1365 } else {
1366 None
1367 }
1368 }
1369
1370 #[inline]
1371 #[allow(non_snake_case)]
1372 pub fn type_as_list(&self) -> Option<List<'a>> {
1373 if self.type_type() == Type::List {
1374 let u = self.type_();
1375 Some(unsafe { List::init_from_table(u) })
1379 } else {
1380 None
1381 }
1382 }
1383
1384 #[inline]
1385 #[allow(non_snake_case)]
1386 pub fn type_as_struct_(&self) -> Option<Struct_<'a>> {
1387 if self.type_type() == Type::Struct_ {
1388 let u = self.type_();
1389 Some(unsafe { Struct_::init_from_table(u) })
1393 } else {
1394 None
1395 }
1396 }
1397
1398 #[inline]
1399 #[allow(non_snake_case)]
1400 pub fn type_as_union(&self) -> Option<Union<'a>> {
1401 if self.type_type() == Type::Union {
1402 let u = self.type_();
1403 Some(unsafe { Union::init_from_table(u) })
1407 } else {
1408 None
1409 }
1410 }
1411
1412 #[inline]
1413 #[allow(non_snake_case)]
1414 pub fn type_as_fixed_size_binary(&self) -> Option<FixedSizeBinary<'a>> {
1415 if self.type_type() == Type::FixedSizeBinary {
1416 let u = self.type_();
1417 Some(unsafe { FixedSizeBinary::init_from_table(u) })
1421 } else {
1422 None
1423 }
1424 }
1425
1426 #[inline]
1427 #[allow(non_snake_case)]
1428 pub fn type_as_fixed_size_list(&self) -> Option<FixedSizeList<'a>> {
1429 if self.type_type() == Type::FixedSizeList {
1430 let u = self.type_();
1431 Some(unsafe { FixedSizeList::init_from_table(u) })
1435 } else {
1436 None
1437 }
1438 }
1439
1440 #[inline]
1441 #[allow(non_snake_case)]
1442 pub fn type_as_map(&self) -> Option<Map<'a>> {
1443 if self.type_type() == Type::Map {
1444 let u = self.type_();
1445 Some(unsafe { Map::init_from_table(u) })
1449 } else {
1450 None
1451 }
1452 }
1453
1454 #[inline]
1455 #[allow(non_snake_case)]
1456 pub fn type_as_duration(&self) -> Option<Duration<'a>> {
1457 if self.type_type() == Type::Duration {
1458 let u = self.type_();
1459 Some(unsafe { Duration::init_from_table(u) })
1463 } else {
1464 None
1465 }
1466 }
1467
1468 #[inline]
1469 #[allow(non_snake_case)]
1470 pub fn type_as_large_binary(&self) -> Option<LargeBinary<'a>> {
1471 if self.type_type() == Type::LargeBinary {
1472 let u = self.type_();
1473 Some(unsafe { LargeBinary::init_from_table(u) })
1477 } else {
1478 None
1479 }
1480 }
1481
1482 #[inline]
1483 #[allow(non_snake_case)]
1484 pub fn type_as_large_utf_8(&self) -> Option<LargeUtf8<'a>> {
1485 if self.type_type() == Type::LargeUtf8 {
1486 let u = self.type_();
1487 Some(unsafe { LargeUtf8::init_from_table(u) })
1491 } else {
1492 None
1493 }
1494 }
1495
1496 #[inline]
1497 #[allow(non_snake_case)]
1498 pub fn type_as_large_list(&self) -> Option<LargeList<'a>> {
1499 if self.type_type() == Type::LargeList {
1500 let u = self.type_();
1501 Some(unsafe { LargeList::init_from_table(u) })
1505 } else {
1506 None
1507 }
1508 }
1509
1510 #[inline]
1511 #[allow(non_snake_case)]
1512 pub fn type_as_run_end_encoded(&self) -> Option<RunEndEncoded<'a>> {
1513 if self.type_type() == Type::RunEndEncoded {
1514 let u = self.type_();
1515 Some(unsafe { RunEndEncoded::init_from_table(u) })
1519 } else {
1520 None
1521 }
1522 }
1523
1524 #[inline]
1525 #[allow(non_snake_case)]
1526 pub fn sparseIndex_as_sparse_tensor_index_coo(&self) -> Option<SparseTensorIndexCOO<'a>> {
1527 if self.sparseIndex_type() == SparseTensorIndex::SparseTensorIndexCOO {
1528 let u = self.sparseIndex();
1529 Some(unsafe { SparseTensorIndexCOO::init_from_table(u) })
1533 } else {
1534 None
1535 }
1536 }
1537
1538 #[inline]
1539 #[allow(non_snake_case)]
1540 pub fn sparseIndex_as_sparse_matrix_index_csx(&self) -> Option<SparseMatrixIndexCSX<'a>> {
1541 if self.sparseIndex_type() == SparseTensorIndex::SparseMatrixIndexCSX {
1542 let u = self.sparseIndex();
1543 Some(unsafe { SparseMatrixIndexCSX::init_from_table(u) })
1547 } else {
1548 None
1549 }
1550 }
1551
1552 #[inline]
1553 #[allow(non_snake_case)]
1554 pub fn sparseIndex_as_sparse_tensor_index_csf(&self) -> Option<SparseTensorIndexCSF<'a>> {
1555 if self.sparseIndex_type() == SparseTensorIndex::SparseTensorIndexCSF {
1556 let u = self.sparseIndex();
1557 Some(unsafe { SparseTensorIndexCSF::init_from_table(u) })
1561 } else {
1562 None
1563 }
1564 }
1565}
1566
impl flatbuffers::Verifiable for SparseTensor<'_> {
    // Structural verification of a serialized SparseTensor table.
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        // Walk the table at `pos` and check each field in declaration
        // order. The boolean passed to the visit_* calls marks the field
        // as required — it mirrors the `required` checks performed in
        // `SparseTensorBuilder::finish` (type_, shape, sparseIndex, data).
        v.visit_table(pos)?
            .visit_union::<Type, _>(
                "type_type",
                Self::VT_TYPE_TYPE,
                "type_",
                Self::VT_TYPE_,
                true,
                // Dispatch on the union discriminant and verify the value
                // as the corresponding table type.
                |key, v, pos| match key {
                    Type::Null => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Null>>(
                        "Type::Null",
                        pos,
                    ),
                    Type::Int => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Int>>(
                        "Type::Int",
                        pos,
                    ),
                    Type::FloatingPoint => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<FloatingPoint>>(
                            "Type::FloatingPoint",
                            pos,
                        ),
                    Type::Binary => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Binary>>(
                        "Type::Binary",
                        pos,
                    ),
                    Type::Utf8 => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Utf8>>(
                        "Type::Utf8",
                        pos,
                    ),
                    Type::Bool => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Bool>>(
                        "Type::Bool",
                        pos,
                    ),
                    Type::Decimal => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Decimal>>(
                            "Type::Decimal",
                            pos,
                        ),
                    Type::Date => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Date>>(
                        "Type::Date",
                        pos,
                    ),
                    Type::Time => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Time>>(
                        "Type::Time",
                        pos,
                    ),
                    Type::Timestamp => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Timestamp>>(
                            "Type::Timestamp",
                            pos,
                        ),
                    Type::Interval => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Interval>>(
                            "Type::Interval",
                            pos,
                        ),
                    Type::List => v.verify_union_variant::<flatbuffers::ForwardsUOffset<List>>(
                        "Type::List",
                        pos,
                    ),
                    Type::Struct_ => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Struct_>>(
                            "Type::Struct_",
                            pos,
                        ),
                    Type::Union => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Union>>(
                        "Type::Union",
                        pos,
                    ),
                    Type::FixedSizeBinary => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<FixedSizeBinary>>(
                            "Type::FixedSizeBinary",
                            pos,
                        ),
                    Type::FixedSizeList => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<FixedSizeList>>(
                            "Type::FixedSizeList",
                            pos,
                        ),
                    Type::Map => v.verify_union_variant::<flatbuffers::ForwardsUOffset<Map>>(
                        "Type::Map",
                        pos,
                    ),
                    Type::Duration => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Duration>>(
                            "Type::Duration",
                            pos,
                        ),
                    Type::LargeBinary => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeBinary>>(
                            "Type::LargeBinary",
                            pos,
                        ),
                    Type::LargeUtf8 => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeUtf8>>(
                            "Type::LargeUtf8",
                            pos,
                        ),
                    Type::LargeList => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<LargeList>>(
                            "Type::LargeList",
                            pos,
                        ),
                    Type::RunEndEncoded => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<RunEndEncoded>>(
                            "Type::RunEndEncoded",
                            pos,
                        ),
                    // Unknown discriminants verify successfully, so buffers
                    // written by newer schemas remain readable.
                    _ => Ok(()),
                },
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<TensorDim>>,
            >>("shape", Self::VT_SHAPE, true)?
            .visit_field::<i64>("non_zero_length", Self::VT_NON_ZERO_LENGTH, false)?
            .visit_union::<SparseTensorIndex, _>(
                "sparseIndex_type",
                Self::VT_SPARSEINDEX_TYPE,
                "sparseIndex",
                Self::VT_SPARSEINDEX,
                true,
                |key, v, pos| match key {
                    SparseTensorIndex::SparseTensorIndexCOO => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensorIndexCOO>>(
                            "SparseTensorIndex::SparseTensorIndexCOO",
                            pos,
                        ),
                    SparseTensorIndex::SparseMatrixIndexCSX => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseMatrixIndexCSX>>(
                            "SparseTensorIndex::SparseMatrixIndexCSX",
                            pos,
                        ),
                    SparseTensorIndex::SparseTensorIndexCSF => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensorIndexCSF>>(
                            "SparseTensorIndex::SparseTensorIndexCSF",
                            pos,
                        ),
                    // Unknown discriminants verify successfully (forward
                    // compatibility).
                    _ => Ok(()),
                },
            )?
            .visit_field::<Buffer>("data", Self::VT_DATA, true)?
            .finish();
        Ok(())
    }
}
/// Argument bundle used to build a `SparseTensor` table with
/// `SparseTensorBuilder`.
pub struct SparseTensorArgs<'a> {
    /// Discriminant for the `type_` union below.
    pub type_type: Type,
    /// Offset of the `type_` union value; required when finishing the table.
    pub type_: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    /// Offset of the shape vector of `TensorDim` tables; required.
    pub shape: Option<
        flatbuffers::WIPOffset<
            flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<TensorDim<'a>>>,
        >,
    >,
    /// Number of non-zero values (defaults to 0).
    pub non_zero_length: i64,
    /// Discriminant for the `sparseIndex` union below.
    pub sparseIndex_type: SparseTensorIndex,
    /// Offset of the `sparseIndex` union value; required.
    pub sparseIndex: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    /// Inline `Buffer` struct describing the value data; required.
    pub data: Option<&'a Buffer>,
}
1732impl<'a> Default for SparseTensorArgs<'a> {
1733 #[inline]
1734 fn default() -> Self {
1735 SparseTensorArgs {
1736 type_type: Type::NONE,
1737 type_: None, shape: None, non_zero_length: 0,
1740 sparseIndex_type: SparseTensorIndex::NONE,
1741 sparseIndex: None, data: None, }
1744 }
1745}
1746
/// Incrementally writes the fields of a `SparseTensor` table into a
/// `FlatBufferBuilder`; call `finish` to close the table.
pub struct SparseTensorBuilder<'a: 'b, 'b> {
    /// Builder the table's slots are pushed into.
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    /// Offset of the table started by `new`, consumed by `finish`.
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
1751impl<'a: 'b, 'b> SparseTensorBuilder<'a, 'b> {
1752 #[inline]
1753 pub fn add_type_type(&mut self, type_type: Type) {
1754 self.fbb_
1755 .push_slot::<Type>(SparseTensor::VT_TYPE_TYPE, type_type, Type::NONE);
1756 }
1757 #[inline]
1758 pub fn add_type_(&mut self, type_: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
1759 self.fbb_
1760 .push_slot_always::<flatbuffers::WIPOffset<_>>(SparseTensor::VT_TYPE_, type_);
1761 }
1762 #[inline]
1763 pub fn add_shape(
1764 &mut self,
1765 shape: flatbuffers::WIPOffset<
1766 flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<TensorDim<'b>>>,
1767 >,
1768 ) {
1769 self.fbb_
1770 .push_slot_always::<flatbuffers::WIPOffset<_>>(SparseTensor::VT_SHAPE, shape);
1771 }
1772 #[inline]
1773 pub fn add_non_zero_length(&mut self, non_zero_length: i64) {
1774 self.fbb_
1775 .push_slot::<i64>(SparseTensor::VT_NON_ZERO_LENGTH, non_zero_length, 0);
1776 }
1777 #[inline]
1778 pub fn add_sparseIndex_type(&mut self, sparseIndex_type: SparseTensorIndex) {
1779 self.fbb_.push_slot::<SparseTensorIndex>(
1780 SparseTensor::VT_SPARSEINDEX_TYPE,
1781 sparseIndex_type,
1782 SparseTensorIndex::NONE,
1783 );
1784 }
1785 #[inline]
1786 pub fn add_sparseIndex(
1787 &mut self,
1788 sparseIndex: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>,
1789 ) {
1790 self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
1791 SparseTensor::VT_SPARSEINDEX,
1792 sparseIndex,
1793 );
1794 }
1795 #[inline]
1796 pub fn add_data(&mut self, data: &Buffer) {
1797 self.fbb_
1798 .push_slot_always::<&Buffer>(SparseTensor::VT_DATA, data);
1799 }
1800 #[inline]
1801 pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SparseTensorBuilder<'a, 'b> {
1802 let start = _fbb.start_table();
1803 SparseTensorBuilder {
1804 fbb_: _fbb,
1805 start_: start,
1806 }
1807 }
1808 #[inline]
1809 pub fn finish(self) -> flatbuffers::WIPOffset<SparseTensor<'a>> {
1810 let o = self.fbb_.end_table(self.start_);
1811 self.fbb_.required(o, SparseTensor::VT_TYPE_, "type_");
1812 self.fbb_.required(o, SparseTensor::VT_SHAPE, "shape");
1813 self.fbb_
1814 .required(o, SparseTensor::VT_SPARSEINDEX, "sparseIndex");
1815 self.fbb_.required(o, SparseTensor::VT_DATA, "data");
1816 flatbuffers::WIPOffset::new(o.value())
1817 }
1818}
1819
1820impl core::fmt::Debug for SparseTensor<'_> {
1821 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1822 let mut ds = f.debug_struct("SparseTensor");
1823 ds.field("type_type", &self.type_type());
1824 match self.type_type() {
1825 Type::Null => {
1826 if let Some(x) = self.type_as_null() {
1827 ds.field("type_", &x)
1828 } else {
1829 ds.field(
1830 "type_",
1831 &"InvalidFlatbuffer: Union discriminant does not match value.",
1832 )
1833 }
1834 }
1835 Type::Int => {
1836 if let Some(x) = self.type_as_int() {
1837 ds.field("type_", &x)
1838 } else {
1839 ds.field(
1840 "type_",
1841 &"InvalidFlatbuffer: Union discriminant does not match value.",
1842 )
1843 }
1844 }
1845 Type::FloatingPoint => {
1846 if let Some(x) = self.type_as_floating_point() {
1847 ds.field("type_", &x)
1848 } else {
1849 ds.field(
1850 "type_",
1851 &"InvalidFlatbuffer: Union discriminant does not match value.",
1852 )
1853 }
1854 }
1855 Type::Binary => {
1856 if let Some(x) = self.type_as_binary() {
1857 ds.field("type_", &x)
1858 } else {
1859 ds.field(
1860 "type_",
1861 &"InvalidFlatbuffer: Union discriminant does not match value.",
1862 )
1863 }
1864 }
1865 Type::Utf8 => {
1866 if let Some(x) = self.type_as_utf_8() {
1867 ds.field("type_", &x)
1868 } else {
1869 ds.field(
1870 "type_",
1871 &"InvalidFlatbuffer: Union discriminant does not match value.",
1872 )
1873 }
1874 }
1875 Type::Bool => {
1876 if let Some(x) = self.type_as_bool() {
1877 ds.field("type_", &x)
1878 } else {
1879 ds.field(
1880 "type_",
1881 &"InvalidFlatbuffer: Union discriminant does not match value.",
1882 )
1883 }
1884 }
1885 Type::Decimal => {
1886 if let Some(x) = self.type_as_decimal() {
1887 ds.field("type_", &x)
1888 } else {
1889 ds.field(
1890 "type_",
1891 &"InvalidFlatbuffer: Union discriminant does not match value.",
1892 )
1893 }
1894 }
1895 Type::Date => {
1896 if let Some(x) = self.type_as_date() {
1897 ds.field("type_", &x)
1898 } else {
1899 ds.field(
1900 "type_",
1901 &"InvalidFlatbuffer: Union discriminant does not match value.",
1902 )
1903 }
1904 }
1905 Type::Time => {
1906 if let Some(x) = self.type_as_time() {
1907 ds.field("type_", &x)
1908 } else {
1909 ds.field(
1910 "type_",
1911 &"InvalidFlatbuffer: Union discriminant does not match value.",
1912 )
1913 }
1914 }
1915 Type::Timestamp => {
1916 if let Some(x) = self.type_as_timestamp() {
1917 ds.field("type_", &x)
1918 } else {
1919 ds.field(
1920 "type_",
1921 &"InvalidFlatbuffer: Union discriminant does not match value.",
1922 )
1923 }
1924 }
1925 Type::Interval => {
1926 if let Some(x) = self.type_as_interval() {
1927 ds.field("type_", &x)
1928 } else {
1929 ds.field(
1930 "type_",
1931 &"InvalidFlatbuffer: Union discriminant does not match value.",
1932 )
1933 }
1934 }
1935 Type::List => {
1936 if let Some(x) = self.type_as_list() {
1937 ds.field("type_", &x)
1938 } else {
1939 ds.field(
1940 "type_",
1941 &"InvalidFlatbuffer: Union discriminant does not match value.",
1942 )
1943 }
1944 }
1945 Type::Struct_ => {
1946 if let Some(x) = self.type_as_struct_() {
1947 ds.field("type_", &x)
1948 } else {
1949 ds.field(
1950 "type_",
1951 &"InvalidFlatbuffer: Union discriminant does not match value.",
1952 )
1953 }
1954 }
1955 Type::Union => {
1956 if let Some(x) = self.type_as_union() {
1957 ds.field("type_", &x)
1958 } else {
1959 ds.field(
1960 "type_",
1961 &"InvalidFlatbuffer: Union discriminant does not match value.",
1962 )
1963 }
1964 }
1965 Type::FixedSizeBinary => {
1966 if let Some(x) = self.type_as_fixed_size_binary() {
1967 ds.field("type_", &x)
1968 } else {
1969 ds.field(
1970 "type_",
1971 &"InvalidFlatbuffer: Union discriminant does not match value.",
1972 )
1973 }
1974 }
1975 Type::FixedSizeList => {
1976 if let Some(x) = self.type_as_fixed_size_list() {
1977 ds.field("type_", &x)
1978 } else {
1979 ds.field(
1980 "type_",
1981 &"InvalidFlatbuffer: Union discriminant does not match value.",
1982 )
1983 }
1984 }
1985 Type::Map => {
1986 if let Some(x) = self.type_as_map() {
1987 ds.field("type_", &x)
1988 } else {
1989 ds.field(
1990 "type_",
1991 &"InvalidFlatbuffer: Union discriminant does not match value.",
1992 )
1993 }
1994 }
1995 Type::Duration => {
1996 if let Some(x) = self.type_as_duration() {
1997 ds.field("type_", &x)
1998 } else {
1999 ds.field(
2000 "type_",
2001 &"InvalidFlatbuffer: Union discriminant does not match value.",
2002 )
2003 }
2004 }
2005 Type::LargeBinary => {
2006 if let Some(x) = self.type_as_large_binary() {
2007 ds.field("type_", &x)
2008 } else {
2009 ds.field(
2010 "type_",
2011 &"InvalidFlatbuffer: Union discriminant does not match value.",
2012 )
2013 }
2014 }
2015 Type::LargeUtf8 => {
2016 if let Some(x) = self.type_as_large_utf_8() {
2017 ds.field("type_", &x)
2018 } else {
2019 ds.field(
2020 "type_",
2021 &"InvalidFlatbuffer: Union discriminant does not match value.",
2022 )
2023 }
2024 }
2025 Type::LargeList => {
2026 if let Some(x) = self.type_as_large_list() {
2027 ds.field("type_", &x)
2028 } else {
2029 ds.field(
2030 "type_",
2031 &"InvalidFlatbuffer: Union discriminant does not match value.",
2032 )
2033 }
2034 }
2035 Type::RunEndEncoded => {
2036 if let Some(x) = self.type_as_run_end_encoded() {
2037 ds.field("type_", &x)
2038 } else {
2039 ds.field(
2040 "type_",
2041 &"InvalidFlatbuffer: Union discriminant does not match value.",
2042 )
2043 }
2044 }
2045 _ => {
2046 let x: Option<()> = None;
2047 ds.field("type_", &x)
2048 }
2049 };
2050 ds.field("shape", &self.shape());
2051 ds.field("non_zero_length", &self.non_zero_length());
2052 ds.field("sparseIndex_type", &self.sparseIndex_type());
2053 match self.sparseIndex_type() {
2054 SparseTensorIndex::SparseTensorIndexCOO => {
2055 if let Some(x) = self.sparseIndex_as_sparse_tensor_index_coo() {
2056 ds.field("sparseIndex", &x)
2057 } else {
2058 ds.field(
2059 "sparseIndex",
2060 &"InvalidFlatbuffer: Union discriminant does not match value.",
2061 )
2062 }
2063 }
2064 SparseTensorIndex::SparseMatrixIndexCSX => {
2065 if let Some(x) = self.sparseIndex_as_sparse_matrix_index_csx() {
2066 ds.field("sparseIndex", &x)
2067 } else {
2068 ds.field(
2069 "sparseIndex",
2070 &"InvalidFlatbuffer: Union discriminant does not match value.",
2071 )
2072 }
2073 }
2074 SparseTensorIndex::SparseTensorIndexCSF => {
2075 if let Some(x) = self.sparseIndex_as_sparse_tensor_index_csf() {
2076 ds.field("sparseIndex", &x)
2077 } else {
2078 ds.field(
2079 "sparseIndex",
2080 &"InvalidFlatbuffer: Union discriminant does not match value.",
2081 )
2082 }
2083 }
2084 _ => {
2085 let x: Option<()> = None;
2086 ds.field("sparseIndex", &x)
2087 }
2088 };
2089 ds.field("data", &self.data());
2090 ds.finish()
2091 }
2092}
#[inline]
/// Verifies that a buffer of bytes contains a `SparseTensor` and returns it.
/// Note that flatbuffers verification may not catch every error; for the
/// unchecked behavior use `root_as_sparse_tensor_unchecked`.
pub fn root_as_sparse_tensor(buf: &[u8]) -> Result<SparseTensor, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root::<SparseTensor>(buf)
}
#[inline]
/// Verifies that a size-prefixed buffer of bytes contains a `SparseTensor`
/// and returns it. Note that flatbuffers verification may not catch every
/// error; for the unchecked behavior use
/// `size_prefixed_root_as_sparse_tensor_unchecked`.
pub fn size_prefixed_root_as_sparse_tensor(
    buf: &[u8],
) -> Result<SparseTensor, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root::<SparseTensor>(buf)
}
#[inline]
/// Verifies, with the given verifier options, that a buffer of bytes
/// contains a `SparseTensor` and returns it.
pub fn root_as_sparse_tensor_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<SparseTensor<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root_with_opts::<SparseTensor<'b>>(opts, buf)
}
#[inline]
/// Verifies, with the given verifier options, that a size-prefixed buffer
/// of bytes contains a `SparseTensor` and returns it.
pub fn size_prefixed_root_as_sparse_tensor_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<SparseTensor<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root_with_opts::<SparseTensor<'b>>(opts, buf)
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains a
/// `SparseTensor` and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid
/// `SparseTensor`.
pub unsafe fn root_as_sparse_tensor_unchecked(buf: &[u8]) -> SparseTensor {
    flatbuffers::root_unchecked::<SparseTensor>(buf)
}
#[inline]
/// Assumes, without verification, that a size-prefixed buffer of bytes
/// contains a `SparseTensor` and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid
/// size-prefixed `SparseTensor`.
pub unsafe fn size_prefixed_root_as_sparse_tensor_unchecked(buf: &[u8]) -> SparseTensor {
    flatbuffers::size_prefixed_root_unchecked::<SparseTensor>(buf)
}
#[inline]
/// Finishes the builder's buffer with `root` as the root table
/// (no file identifier).
pub fn finish_sparse_tensor_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<SparseTensor<'a>>,
) {
    fbb.finish(root, None);
}
2162
#[inline]
/// Finishes the builder's buffer, size-prefixed, with `root` as the root
/// table (no file identifier).
pub fn finish_size_prefixed_sparse_tensor_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<SparseTensor<'a>>,
) {
    fbb.finish_size_prefixed(root, None);
}