• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // automatically generated by the FlatBuffers compiler, do not modify
2 // @generated
3 extern crate alloc;
4 extern crate flatbuffers;
5 use alloc::boxed::Box;
6 use alloc::string::{String, ToString};
7 use alloc::vec::Vec;
8 use core::mem;
9 use core::cmp::Ordering;
10 extern crate serde;
11 use self::serde::ser::{Serialize, Serializer, SerializeStruct};
12 use self::flatbuffers::{EndianScalar, Follow};
13 use super::*;
// struct Test, aligned to 2
/// Fixed-size FlatBuffers struct `MyGame.Example.Test`.
///
/// Backed by 4 raw little-endian bytes: `a: i16` lives at offset 0 and
/// `b: i8` at offset 2 (see the `a`/`set_a`/`b`/`set_b` accessors); the
/// remaining byte is padding for the struct's 2-byte alignment.
/// `#[repr(transparent)]` guarantees the layout is exactly the inner array.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Test(pub [u8; 4]);
18 impl Default for Test {
default() -> Self19   fn default() -> Self {
20     Self([0; 4])
21   }
22 }
23 impl core::fmt::Debug for Test {
fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result24   fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
25     f.debug_struct("Test")
26       .field("a", &self.a())
27       .field("b", &self.b())
28       .finish()
29   }
30 }
31 
// Marker impl: `Test` is a plain fixed-size struct, so a vector of them can
// be verified without inspecting each element individually.
impl flatbuffers::SimpleToVerifyInSlice for Test {}
impl<'a> flatbuffers::Follow<'a> for Test {
  type Inner = &'a Test;
  /// Resolves a `Test` at `loc` by delegating to the `&'a Test` impl.
  ///
  /// # Safety
  /// `buf[loc..]` must contain a valid, properly aligned `Test`.
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Test>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Test {
  type Inner = &'a Test;
  /// Reinterprets the bytes at `buf[loc..]` as a `&Test` without copying.
  ///
  /// # Safety
  /// `buf[loc..]` must contain a valid, properly aligned `Test`.
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Test>(buf, loc)
  }
}
// NOTE: the `'b` lifetime parameter is unused; it is an artifact of the
// FlatBuffers code generator and is harmless.
impl<'b> flatbuffers::Push for Test {
    type Output = Test;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        // SAFETY: `Test` is `#[repr(transparent)]` over `[u8; 4]`, so viewing
        // `self` as `Self::size()` raw bytes is sound. `dst` must be exactly
        // that length — `copy_from_slice` panics on a length mismatch.
        let src = ::core::slice::from_raw_parts(self as *const Test as *const u8, Self::size());
        dst.copy_from_slice(src);
    }
}
55 
impl<'a> flatbuffers::Verifiable for Test {
  /// Verifies that a `Test` fits in the buffer at `pos`; a fixed-size
  /// struct needs no per-field verification beyond this range check.
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}
65 
66 impl Serialize for Test {
serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer,67   fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
68   where
69     S: Serializer,
70   {
71     let mut s = serializer.serialize_struct("Test", 2)?;
72       s.serialize_field("a", &self.a())?;
73       s.serialize_field("b", &self.b())?;
74     s.end()
75   }
76 }
77 
// NOTE: the `'a` lifetime parameter is unused; it is an artifact of the
// FlatBuffers code generator and is harmless.
impl<'a> Test {
  /// Constructs a `Test` by encoding each field into the backing bytes.
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    a: i16,
    b: i8,
  ) -> Self {
    let mut s = Self([0; 4]);
    s.set_a(a);
    s.set_b(b);
    s
  }

  /// Schema-qualified name of this struct type.
  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.Test"
  }

  /// Reads field `a` (little-endian `i16` at byte offset 0) as a native value.
  pub fn a(&self) -> i16 {
    // Copy into a MaybeUninit rather than casting, to avoid any alignment
    // assumptions on the backing byte array.
    let mut mem = core::mem::MaybeUninit::<<i16 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<i16 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  /// Writes field `a` as a little-endian `i16` at byte offset 0.
  pub fn set_a(&mut self, x: i16) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<i16 as EndianScalar>::Scalar>(),
      );
    }
  }

  /// Reads field `b` (little-endian `i8` at byte offset 2) as a native value.
  pub fn b(&self) -> i8 {
    let mut mem = core::mem::MaybeUninit::<<i8 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[2..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<i8 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  /// Writes field `b` as a little-endian `i8` at byte offset 2.
  pub fn set_b(&mut self, x: i8) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[2..].as_mut_ptr(),
        core::mem::size_of::<<i8 as EndianScalar>::Scalar>(),
      );
    }
  }

  /// Decodes this packed struct into its native (`TestT`) representation.
  pub fn unpack(&self) -> TestT {
    TestT {
      a: self.a(),
      b: self.b(),
    }
  }
}
159 
/// Native ("object API") counterpart of the packed `Test` struct, with
/// directly readable/writable fields. Convert with `Test::unpack` and
/// `TestT::pack`.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct TestT {
  pub a: i16,
  pub b: i8,
}
165 impl TestT {
pack(&self) -> Test166   pub fn pack(&self) -> Test {
167     Test::new(
168       self.a,
169       self.b,
170     )
171   }
172 }
173 
174