// automatically generated by the FlatBuffers compiler, do not modify



use std::mem;
use std::cmp::Ordering;

extern crate flatbuffers;
use self::flatbuffers::{EndianScalar, Follow};

#[allow(unused_imports, dead_code)]
pub mod my_game {

  use std::mem;
  use std::cmp::Ordering;

  extern crate flatbuffers;
  use self::flatbuffers::{EndianScalar, Follow};
#[allow(unused_imports, dead_code)]
pub mod example {

  use std::mem;
  use std::cmp::Ordering;

  extern crate flatbuffers;
  use self::flatbuffers::{EndianScalar, Follow};

#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_TEST_ENUM: i8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_TEST_ENUM: i8 = 2;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_TEST_ENUM: [TestEnum; 3] = [
  TestEnum::A,
  TestEnum::B,
  TestEnum::C,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct TestEnum(pub i8);
#[allow(non_upper_case_globals)]
impl TestEnum {
  pub const A: Self = Self(0);
  pub const B: Self = Self(1);
  pub const C: Self = Self(2);

  pub const ENUM_MIN: i8 = 0;
  pub const ENUM_MAX: i8 = 2;
  pub const ENUM_VALUES: &'static [Self] = &[
    Self::A,
    Self::B,
    Self::C,
  ];
  /// Returns the variant's name or "" if unknown.
  pub fn variant_name(self) -> Option<&'static str> {
    match self {
      Self::A => Some("A"),
      Self::B => Some("B"),
      Self::C => Some("C"),
      _ => None,
    }
  }
}
impl std::fmt::Debug for TestEnum {
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    if let Some(name) = self.variant_name() {
      f.write_str(name)
    } else {
      f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
    }
  }
}
impl<'a> flatbuffers::Follow<'a> for TestEnum {
  type Inner = Self;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    let b = unsafe {
      flatbuffers::read_scalar_at::<i8>(buf, loc)
    };
    Self(b)
  }
}

impl flatbuffers::Push for TestEnum {
  type Output = TestEnum;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    unsafe { flatbuffers::emplace_scalar::<i8>(dst, self.0); }
  }
}

impl flatbuffers::EndianScalar for TestEnum {
  #[inline]
  fn to_little_endian(self) -> Self {
    let b = i8::to_le(self.0);
    Self(b)
  }
  #[inline]
  #[allow(clippy::wrong_self_convention)]
  fn from_little_endian(self) -> Self {
    let b = i8::from_le(self.0);
    Self(b)
  }
}

impl<'a> flatbuffers::Verifiable for TestEnum {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    i8::run_verifier(v, pos)
  }
}

impl flatbuffers::SimpleToVerifyInSlice for TestEnum {}
// struct NestedStruct, aligned to 8
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct NestedStruct(pub [u8; 32]);
impl Default for NestedStruct {
  fn default() -> Self {
    Self([0; 32])
  }
}
impl std::fmt::Debug for NestedStruct {
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    f.debug_struct("NestedStruct")
      .field("a", &self.a())
      .field("b", &self.b())
      .field("c", &self.c())
      .field("d", &self.d())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for NestedStruct {}
impl flatbuffers::SafeSliceAccess for NestedStruct {}
impl<'a> flatbuffers::Follow<'a> for NestedStruct {
  type Inner = &'a NestedStruct;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a NestedStruct>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a NestedStruct {
  type Inner = &'a NestedStruct;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<NestedStruct>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for NestedStruct {
  type Output = NestedStruct;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::std::slice::from_raw_parts(self as *const NestedStruct as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}
impl<'b> flatbuffers::Push for &'b NestedStruct {
  type Output = NestedStruct;

  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::std::slice::from_raw_parts(*self as *const NestedStruct as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for NestedStruct {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}
impl<'a> NestedStruct {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    a: &[i32; 2],
    b: TestEnum,
    c: &[TestEnum; 2],
    d: &[i64; 2],
  ) -> Self {
    let mut s = Self([0; 32]);
    s.set_a(&a);
    s.set_b(b);
    s.set_c(&c);
    s.set_d(&d);
    s
  }

  pub fn a(&'a self) -> flatbuffers::Array<'a, i32, 2> {
    flatbuffers::Array::follow(&self.0, 0)
  }

  pub fn set_a(&mut self, items: &[i32; 2]) {
    flatbuffers::emplace_scalar_array(&mut self.0, 0, items);
  }

  pub fn b(&self) -> TestEnum {
    let mut mem = core::mem::MaybeUninit::<TestEnum>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[8..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<TestEnum>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_b(&mut self, x: TestEnum) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const TestEnum as *const u8,
        self.0[8..].as_mut_ptr(),
        core::mem::size_of::<TestEnum>(),
      );
    }
  }

  pub fn c(&'a self) -> flatbuffers::Array<'a, TestEnum, 2> {
    flatbuffers::Array::follow(&self.0, 9)
  }

  pub fn set_c(&mut self, x: &[TestEnum; 2]) {
    unsafe {
      std::ptr::copy(
        x.as_ptr() as *const u8,
        self.0.as_mut_ptr().add(9),
        2,
      );
    }
  }

  pub fn d(&'a self) -> flatbuffers::Array<'a, i64, 2> {
    flatbuffers::Array::follow(&self.0, 16)
  }

  pub fn set_d(&mut self, items: &[i64; 2]) {
    flatbuffers::emplace_scalar_array(&mut self.0, 16, items);
  }

  pub fn unpack(&self) -> NestedStructT {
    NestedStructT {
      a: self.a().into(),
      b: self.b(),
      c: self.c().into(),
      d: self.d().into(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct NestedStructT {
  pub a: [i32; 2],
  pub b: TestEnum,
  pub c: [TestEnum; 2],
  pub d: [i64; 2],
}
impl NestedStructT {
  pub fn pack(&self) -> NestedStruct {
    NestedStruct::new(
      &self.a,
      self.b,
      &self.c,
      &self.d,
    )
  }
}

// struct ArrayStruct, aligned to 8
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct ArrayStruct(pub [u8; 160]);
impl Default for ArrayStruct {
  fn default() -> Self {
    Self([0; 160])
  }
}
impl std::fmt::Debug for ArrayStruct {
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    f.debug_struct("ArrayStruct")
      .field("a", &self.a())
      .field("b", &self.b())
      .field("c", &self.c())
      .field("d", &self.d())
      .field("e", &self.e())
      .field("f", &self.f())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for ArrayStruct {}
impl flatbuffers::SafeSliceAccess for ArrayStruct {}
impl<'a> flatbuffers::Follow<'a> for ArrayStruct {
  type Inner = &'a ArrayStruct;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a ArrayStruct>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a ArrayStruct {
  type Inner = &'a ArrayStruct;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<ArrayStruct>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for ArrayStruct {
  type Output = ArrayStruct;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::std::slice::from_raw_parts(self as *const ArrayStruct as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}
impl<'b> flatbuffers::Push for &'b ArrayStruct {
  type Output = ArrayStruct;

  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::std::slice::from_raw_parts(*self as *const ArrayStruct as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for ArrayStruct {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}
impl<'a> ArrayStruct {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    a: f32,
    b: &[i32; 15],
    c: i8,
    d: &[NestedStruct; 2],
    e: i32,
    f: &[i64; 2],
  ) -> Self {
    let mut s = Self([0; 160]);
    s.set_a(a);
    s.set_b(&b);
    s.set_c(c);
    s.set_d(&d);
    s.set_e(e);
    s.set_f(&f);
    s
  }

  pub fn a(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_a(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  pub fn b(&'a self) -> flatbuffers::Array<'a, i32, 15> {
    flatbuffers::Array::follow(&self.0, 4)
  }

  pub fn set_b(&mut self, items: &[i32; 15]) {
    flatbuffers::emplace_scalar_array(&mut self.0, 4, items);
  }

  pub fn c(&self) -> i8 {
    let mut mem = core::mem::MaybeUninit::<i8>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[64..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<i8>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_c(&mut self, x: i8) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const i8 as *const u8,
        self.0[64..].as_mut_ptr(),
        core::mem::size_of::<i8>(),
      );
    }
  }

  pub fn d(&'a self) -> flatbuffers::Array<'a, NestedStruct, 2> {
    flatbuffers::Array::follow(&self.0, 72)
  }

  pub fn set_d(&mut self, x: &[NestedStruct; 2]) {
    unsafe {
      std::ptr::copy(
        x.as_ptr() as *const u8,
        self.0.as_mut_ptr().add(72),
        64,
      );
    }
  }

  pub fn e(&self) -> i32 {
    let mut mem = core::mem::MaybeUninit::<i32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[136..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<i32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_e(&mut self, x: i32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const i32 as *const u8,
        self.0[136..].as_mut_ptr(),
        core::mem::size_of::<i32>(),
      );
    }
  }

  pub fn f(&'a self) -> flatbuffers::Array<'a, i64, 2> {
    flatbuffers::Array::follow(&self.0, 144)
  }

  pub fn set_f(&mut self, items: &[i64; 2]) {
    flatbuffers::emplace_scalar_array(&mut self.0, 144, items);
  }

  pub fn unpack(&self) -> ArrayStructT {
    ArrayStructT {
      a: self.a(),
      b: self.b().into(),
      c: self.c(),
      d: { let d = self.d(); flatbuffers::array_init(|i| d.get(i).unpack()) },
      e: self.e(),
      f: self.f().into(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct ArrayStructT {
  pub a: f32,
  pub b: [i32; 15],
  pub c: i8,
  pub d: [NestedStructT; 2],
  pub e: i32,
  pub f: [i64; 2],
}
impl ArrayStructT {
  pub fn pack(&self) -> ArrayStruct {
    ArrayStruct::new(
      self.a,
      &self.b,
      self.c,
      &flatbuffers::array_init(|i| self.d[i].pack()),
      self.e,
      &self.f,
    )
  }
}
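
// A hand-written illustration, not emitted by flatc: a minimal sketch of how the
// object API above is meant to be used, assuming only the ArrayStructT /
// NestedStructT types and their pack()/unpack() methods defined in this module.
#[cfg(test)]
mod array_struct_object_api_example {
  use super::*;

  #[test]
  fn pack_then_unpack_round_trips() {
    let owned = ArrayStructT {
      a: 1.0,
      b: [2; 15],
      c: 3,
      d: [NestedStructT::default(), NestedStructT::default()],
      e: 4,
      f: [5, 6],
    };
    // pack() lays the fields out into the fixed-size byte array backing
    // ArrayStruct; unpack() reads them back into the owned representation.
    assert_eq!(owned.pack().unpack(), owned);
  }
}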

pub enum ArrayTableOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct ArrayTable<'a> {
  pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for ArrayTable<'a> {
  type Inner = ArrayTable<'a>;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: flatbuffers::Table { buf, loc } }
  }
}

impl<'a> ArrayTable<'a> {
  #[inline]
  pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
    ArrayTable { _tab: table }
  }
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
    _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
    args: &'args ArrayTableArgs<'args>) -> flatbuffers::WIPOffset<ArrayTable<'bldr>> {
    let mut builder = ArrayTableBuilder::new(_fbb);
    if let Some(x) = args.a { builder.add_a(x); }
    builder.finish()
  }

  pub fn unpack(&self) -> ArrayTableT {
    let a = self.a().map(|x| {
      x.unpack()
    });
    ArrayTableT {
      a,
    }
  }
  pub const VT_A: flatbuffers::VOffsetT = 4;

  #[inline]
  pub fn a(&self) -> Option<&'a ArrayStruct> {
    self._tab.get::<ArrayStruct>(ArrayTable::VT_A, None)
  }
}

impl flatbuffers::Verifiable for ArrayTable<'_> {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.visit_table(pos)?
     .visit_field::<ArrayStruct>(&"a", Self::VT_A, false)?
     .finish();
    Ok(())
  }
}
pub struct ArrayTableArgs<'a> {
  pub a: Option<&'a ArrayStruct>,
}
impl<'a> Default for ArrayTableArgs<'a> {
  #[inline]
  fn default() -> Self {
    ArrayTableArgs {
      a: None,
    }
  }
}
pub struct ArrayTableBuilder<'a: 'b, 'b> {
  fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
  start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> ArrayTableBuilder<'a, 'b> {
  #[inline]
  pub fn add_a(&mut self, a: &ArrayStruct) {
    self.fbb_.push_slot_always::<&ArrayStruct>(ArrayTable::VT_A, a);
  }
  #[inline]
  pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ArrayTableBuilder<'a, 'b> {
    let start = _fbb.start_table();
    ArrayTableBuilder {
      fbb_: _fbb,
      start_: start,
    }
  }
  #[inline]
  pub fn finish(self) -> flatbuffers::WIPOffset<ArrayTable<'a>> {
    let o = self.fbb_.end_table(self.start_);
    flatbuffers::WIPOffset::new(o.value())
  }
}

impl std::fmt::Debug for ArrayTable<'_> {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let mut ds = f.debug_struct("ArrayTable");
    ds.field("a", &self.a());
    ds.finish()
  }
}
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq)]
pub struct ArrayTableT {
  pub a: Option<ArrayStructT>,
}
impl Default for ArrayTableT {
  fn default() -> Self {
    Self {
      a: None,
    }
  }
}
impl ArrayTableT {
  pub fn pack<'b>(
    &self,
    _fbb: &mut flatbuffers::FlatBufferBuilder<'b>
  ) -> flatbuffers::WIPOffset<ArrayTable<'b>> {
    let a_tmp = self.a.as_ref().map(|x| x.pack());
    let a = a_tmp.as_ref();
    ArrayTable::create(_fbb, &ArrayTableArgs{
      a,
    })
  }
}
#[inline]
#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
pub fn get_root_as_array_table<'a>(buf: &'a [u8]) -> ArrayTable<'a> {
  unsafe { flatbuffers::root_unchecked::<ArrayTable<'a>>(buf) }
}

#[inline]
#[deprecated(since="2.0.0", note="Deprecated in favor of `root_as...` methods.")]
pub fn get_size_prefixed_root_as_array_table<'a>(buf: &'a [u8]) -> ArrayTable<'a> {
  unsafe { flatbuffers::size_prefixed_root_unchecked::<ArrayTable<'a>>(buf) }
}

#[inline]
/// Verifies that a buffer of bytes contains an `ArrayTable`
/// and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_array_table_unchecked`.
pub fn root_as_array_table(buf: &[u8]) -> Result<ArrayTable, flatbuffers::InvalidFlatbuffer> {
  flatbuffers::root::<ArrayTable>(buf)
}
#[inline]
/// Verifies that a buffer of bytes contains a size prefixed
/// `ArrayTable` and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `size_prefixed_root_as_array_table_unchecked`.
pub fn size_prefixed_root_as_array_table(buf: &[u8]) -> Result<ArrayTable, flatbuffers::InvalidFlatbuffer> {
  flatbuffers::size_prefixed_root::<ArrayTable>(buf)
}
#[inline]
/// Verifies, with the given options, that a buffer of bytes
/// contains an `ArrayTable` and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_array_table_unchecked`.
pub fn root_as_array_table_with_opts<'b, 'o>(
  opts: &'o flatbuffers::VerifierOptions,
  buf: &'b [u8],
) -> Result<ArrayTable<'b>, flatbuffers::InvalidFlatbuffer> {
  flatbuffers::root_with_opts::<ArrayTable<'b>>(opts, buf)
}
#[inline]
/// Verifies, with the given verifier options, that a buffer of
/// bytes contains a size prefixed `ArrayTable` and returns
/// it. Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_array_table_unchecked`.
pub fn size_prefixed_root_as_array_table_with_opts<'b, 'o>(
  opts: &'o flatbuffers::VerifierOptions,
  buf: &'b [u8],
) -> Result<ArrayTable<'b>, flatbuffers::InvalidFlatbuffer> {
  flatbuffers::size_prefixed_root_with_opts::<ArrayTable<'b>>(opts, buf)
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains an ArrayTable and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid `ArrayTable`.
pub unsafe fn root_as_array_table_unchecked(buf: &[u8]) -> ArrayTable {
  flatbuffers::root_unchecked::<ArrayTable>(buf)
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains a size prefixed ArrayTable and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid size prefixed `ArrayTable`.
pub unsafe fn size_prefixed_root_as_array_table_unchecked(buf: &[u8]) -> ArrayTable {
  flatbuffers::size_prefixed_root_unchecked::<ArrayTable>(buf)
}
pub const ARRAY_TABLE_IDENTIFIER: &str = "ARRT";

#[inline]
pub fn array_table_buffer_has_identifier(buf: &[u8]) -> bool {
  flatbuffers::buffer_has_identifier(buf, ARRAY_TABLE_IDENTIFIER, false)
}

#[inline]
pub fn array_table_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool {
  flatbuffers::buffer_has_identifier(buf, ARRAY_TABLE_IDENTIFIER, true)
}

pub const ARRAY_TABLE_EXTENSION: &str = "mon";

#[inline]
pub fn finish_array_table_buffer<'a, 'b>(
  fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
  root: flatbuffers::WIPOffset<ArrayTable<'a>>) {
  fbb.finish(root, Some(ARRAY_TABLE_IDENTIFIER));
}

#[inline]
pub fn finish_size_prefixed_array_table_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<ArrayTable<'a>>) {
  fbb.finish_size_prefixed(root, Some(ARRAY_TABLE_IDENTIFIER));
}
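
// A hand-written usage sketch, not produced by flatc: builds an ArrayTable buffer
// with the builder API above and reads it back through the checked root accessor.
// It assumes only the items defined in this module plus FlatBufferBuilder::new()
// and finished_data() from the flatbuffers crate this file was generated against.
#[cfg(test)]
mod array_table_round_trip_example {
  use super::flatbuffers;
  use super::*;

  #[test]
  fn build_finish_and_verify() {
    let nested = NestedStruct::new(&[1, 2], TestEnum::B, &[TestEnum::A, TestEnum::C], &[-1, -2]);
    let array = ArrayStruct::new(1.5, &[7; 15], 3, &[nested, nested], 4, &[5, 6]);

    // Serialize: the struct is stored inline in the table's `a` slot, and the
    // buffer is finished with the "ARRT" file identifier.
    let mut fbb = flatbuffers::FlatBufferBuilder::new();
    let root = ArrayTable::create(&mut fbb, &ArrayTableArgs { a: Some(&array) });
    finish_array_table_buffer(&mut fbb, root);

    // Deserialize through the verifying root accessor and spot-check a field.
    let table = root_as_array_table(fbb.finished_data()).expect("buffer should verify");
    assert_eq!(table.a().unwrap().e(), 4);
  }
}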
} // pub mod Example
} // pub mod MyGame
