// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct ArrayStruct, aligned to 8
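// Backing storage is a flat 160-byte array; field offsets match the accessors
// below: a: f32 at 0, b: [i32; 15] at 4, c: i8 at 64 (padding to 72),
// d: [NestedStruct; 2] at 72, e: i32 at 136 (padding to 144), f: [i64; 2] at 144.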
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct ArrayStruct(pub [u8; 160]);
impl Default for ArrayStruct {
  fn default() -> Self {
    Self([0; 160])
  }
}
impl core::fmt::Debug for ArrayStruct {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("ArrayStruct")
      .field("a", &self.a())
      .field("b", &self.b())
      .field("c", &self.c())
      .field("d", &self.d())
      .field("e", &self.e())
      .field("f", &self.f())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for ArrayStruct {}
impl flatbuffers::SafeSliceAccess for ArrayStruct {}
impl<'a> flatbuffers::Follow<'a> for ArrayStruct {
  type Inner = &'a ArrayStruct;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a ArrayStruct>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a ArrayStruct {
  type Inner = &'a ArrayStruct;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<ArrayStruct>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for ArrayStruct {
    type Output = ArrayStruct;
    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        let src = unsafe {
            ::core::slice::from_raw_parts(self as *const ArrayStruct as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}
impl<'b> flatbuffers::Push for &'b ArrayStruct {
    type Output = ArrayStruct;

    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        let src = unsafe {
            ::core::slice::from_raw_parts(*self as *const ArrayStruct as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}

impl<'a> flatbuffers::Verifiable for ArrayStruct {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> ArrayStruct {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    a: f32,
    b: &[i32; 15],
    c: i8,
    d: &[NestedStruct; 2],
    e: i32,
    f: &[i64; 2],
  ) -> Self {
    let mut s = Self([0; 160]);
    s.set_a(a);
    s.set_b(b);
    s.set_c(c);
    s.set_d(d);
    s.set_e(e);
    s.set_f(f);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.ArrayStruct"
  }

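  // Scalar accessors copy raw little-endian bytes out of (or into) the backing
  // array with an unaligned memcpy and convert via from_little_endian /
  // to_little_endian, so reads and writes are alignment- and endian-safe on
  // any host.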
  pub fn a(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_a(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  pub fn b(&'a self) -> flatbuffers::Array<'a, i32, 15> {
    flatbuffers::Array::follow(&self.0, 4)
  }

  pub fn set_b(&mut self, items: &[i32; 15]) {
    flatbuffers::emplace_scalar_array(&mut self.0, 4, items);
  }

  pub fn c(&self) -> i8 {
    let mut mem = core::mem::MaybeUninit::<i8>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[64..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<i8>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_c(&mut self, x: i8) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const i8 as *const u8,
        self.0[64..].as_mut_ptr(),
        core::mem::size_of::<i8>(),
      );
    }
  }

  pub fn d(&'a self) -> flatbuffers::Array<'a, NestedStruct, 2> {
    flatbuffers::Array::follow(&self.0, 72)
  }

  pub fn set_d(&mut self, x: &[NestedStruct; 2]) {
    unsafe {
      core::ptr::copy(
        x.as_ptr() as *const u8,
        self.0.as_mut_ptr().add(72),
        64,
      );
    }
  }

  pub fn e(&self) -> i32 {
    let mut mem = core::mem::MaybeUninit::<i32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[136..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<i32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_e(&mut self, x: i32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const i32 as *const u8,
        self.0[136..].as_mut_ptr(),
        core::mem::size_of::<i32>(),
      );
    }
  }

  pub fn f(&'a self) -> flatbuffers::Array<'a, i64, 2> {
    flatbuffers::Array::follow(&self.0, 144)
  }

  pub fn set_f(&mut self, items: &[i64; 2]) {
    flatbuffers::emplace_scalar_array(&mut self.0, 144, items);
  }

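  // Converts the flat byte representation into the owned object-API type
  // `ArrayStructT`; nested structs are unpacked element-wise into a new
  // fixed-size array.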
  pub fn unpack(&self) -> ArrayStructT {
    ArrayStructT {
      a: self.a(),
      b: self.b().into(),
      c: self.c(),
      d: { let d = self.d(); flatbuffers::array_init(|i| d.get(i).unpack()) },
      e: self.e(),
      f: self.f().into(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct ArrayStructT {
  pub a: f32,
  pub b: [i32; 15],
  pub c: i8,
  pub d: [NestedStructT; 2],
  pub e: i32,
  pub f: [i64; 2],
}
impl ArrayStructT {
  pub fn pack(&self) -> ArrayStruct {
    ArrayStruct::new(
      self.a,
      &self.b,
      self.c,
      &flatbuffers::array_init(|i| self.d[i].pack()),
      self.e,
      &self.f,
    )
  }
}
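
// Illustrative sketch (not part of the generated output): a build/read round
// trip, assuming `NestedStruct` also derives `Default` like the other
// generated fixed-size structs in this module.
//
//   let nested = [NestedStruct::default(); 2];
//   let s = ArrayStruct::new(1.0, &[7; 15], 2, &nested, 3, &[4, 5]);
//   assert_eq!(s.a(), 1.0);
//   assert_eq!(s.b().get(0), 7);
//   assert_eq!(s.f().get(1), 5);
//   let owned: ArrayStructT = s.unpack();
//   assert_eq!(owned.pack(), s);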