• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // automatically generated by the FlatBuffers compiler, do not modify
2 // @generated
3 extern crate alloc;
4 extern crate flatbuffers;
5 use alloc::boxed::Box;
6 use alloc::string::{String, ToString};
7 use alloc::vec::Vec;
8 use core::mem;
9 use core::cmp::Ordering;
10 use self::flatbuffers::{EndianScalar, Follow};
11 use super::*;
// struct Vec3, aligned to 8
// Fixed-size FlatBuffers struct stored as 32 raw little-endian bytes;
// field values are decoded/encoded by the accessor methods on the impl
// (see those methods for the per-field byte offsets).
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Vec3(pub [u8; 32]);
16 impl Default for Vec3 {
default() -> Self17   fn default() -> Self {
18     Self([0; 32])
19   }
20 }
impl core::fmt::Debug for Vec3 {
  // Manual Debug impl: the struct holds raw bytes, so print the decoded
  // field values via the accessors rather than the underlying array.
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Vec3")
      .field("x", &self.x())
      .field("y", &self.y())
      .field("z", &self.z())
      .field("test1", &self.test1())
      .field("test2", &self.test2())
      .field("test3", &self.test3())
      .finish()
  }
}
33 
// Marker traits from the flatbuffers runtime: `Vec3` is plain bytes
// (repr(transparent) over [u8; 32]), so slices of it can be verified and
// accessed in place without per-element checks.
impl flatbuffers::SimpleToVerifyInSlice for Vec3 {}
impl flatbuffers::SafeSliceAccess for Vec3 {}
// Following a `Vec3` by value delegates to the by-reference impl below.
impl<'a> flatbuffers::Follow<'a> for Vec3 {
  type Inner = &'a Vec3;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Vec3>::follow(buf, loc)
  }
}
// Resolve a `Vec3` in place: cast the bytes at `loc` to a `&Vec3`
// (valid because of the repr(transparent) byte-array layout).
impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
  type Inner = &'a Vec3;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
  }
}
// Serialize a `Vec3` into a builder buffer by copying its bytes verbatim
// (fields are already stored little-endian).
impl<'b> flatbuffers::Push for Vec3 {
    type Output = Vec3;
    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        // SAFETY: Vec3 is repr(transparent) over [u8; 32], so viewing it
        // as a byte slice is sound; Self::size() is presumably the struct's
        // byte size — TODO confirm against the Push trait definition.
        let src = unsafe {
            ::core::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}
// Same as the by-value Push impl, but for a borrowed `Vec3`.
impl<'b> flatbuffers::Push for &'b Vec3 {
    type Output = Vec3;

    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        // SAFETY: `*self` is a valid &Vec3, and Vec3 is repr(transparent)
        // over [u8; 32], so reinterpreting it as bytes is sound.
        let src = unsafe {
            ::core::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}
71 
impl<'a> flatbuffers::Verifiable for Vec3 {
  // Fixed-size inline struct: verification only needs to check that the
  // whole struct lies within the buffer at `pos`.
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}
81 
impl<'a> Vec3 {
  // Construct a `Vec3` by writing every field into a zeroed 32-byte buffer.
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    x: f32,
    y: f32,
    z: f32,
    test1: f64,
    test2: Color,
    test3: &Test,
  ) -> Self {
    let mut s = Self([0; 32]);
    s.set_x(x);
    s.set_y(y);
    s.set_z(z);
    s.set_test1(test1);
    s.set_test2(test2);
    s.set_test3(test3);
    s
  }

  // Fully-qualified schema name of this struct.
  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.Vec3"
  }

  // Read `x` from byte offset 0, converting from little-endian storage.
  pub fn x(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    // SAFETY: the buffer is 32 bytes, so offset 0 has size_of::<f32>()
    // bytes available; the copy fully initializes `mem`.
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  // Write `x` at byte offset 0 in little-endian form.
  pub fn set_x(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // SAFETY: offset 0 has size_of::<f32>() bytes available in the
    // 32-byte buffer, and `x_le` is a valid source of that many bytes.
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  // Read `y` from byte offset 4.
  pub fn y(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    // SAFETY: offset 4 + size_of::<f32>() fits inside the 32-byte buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  // Write `y` at byte offset 4 in little-endian form.
  pub fn set_y(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // SAFETY: offset 4 + size_of::<f32>() fits inside the 32-byte buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  // Read `z` from byte offset 8.
  pub fn z(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    // SAFETY: offset 8 + size_of::<f32>() fits inside the 32-byte buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[8..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  // Write `z` at byte offset 8 in little-endian form.
  pub fn set_z(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // SAFETY: offset 8 + size_of::<f32>() fits inside the 32-byte buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[8..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  // Read `test1` from byte offset 16 (offset 12..16 is alignment padding
  // for the 8-byte double).
  pub fn test1(&self) -> f64 {
    let mut mem = core::mem::MaybeUninit::<f64>::uninit();
    // SAFETY: offset 16 + size_of::<f64>() fits inside the 32-byte buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[16..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f64>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  // Write `test1` at byte offset 16 in little-endian form.
  pub fn set_test1(&mut self, x: f64) {
    let x_le = x.to_little_endian();
    // SAFETY: offset 16 + size_of::<f64>() fits inside the 32-byte buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f64 as *const u8,
        self.0[16..].as_mut_ptr(),
        core::mem::size_of::<f64>(),
      );
    }
  }

  // Read the `test2` enum from byte offset 24.
  pub fn test2(&self) -> Color {
    let mut mem = core::mem::MaybeUninit::<Color>::uninit();
    // SAFETY: offset 24 + size_of::<Color>() fits inside the buffer
    // (test3 starts at offset 26, so Color is at most 2 bytes here).
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[24..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<Color>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  // Write `test2` at byte offset 24 in little-endian form.
  pub fn set_test2(&mut self, x: Color) {
    let x_le = x.to_little_endian();
    // SAFETY: offset 24 + size_of::<Color>() fits inside the buffer.
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const Color as *const u8,
        self.0[24..].as_mut_ptr(),
        core::mem::size_of::<Color>(),
      );
    }
  }

  // Borrow the nested `Test` struct in place at byte offset 26.
  pub fn test3(&self) -> &Test {
    // SAFETY(review): assumes `Test` is itself a repr(transparent) byte
    // wrapper with alignment 1 so the in-place cast is valid — the 4-byte
    // copy in set_test3 supports this; confirm against Test's definition.
    unsafe { &*(self.0[26..].as_ptr() as *const Test) }
  }

  // Copy the nested `Test` struct's 4 bytes into offsets 26..30
  // (the remaining 30..32 is trailing padding).
  #[allow(clippy::identity_op)]
  pub fn set_test3(&mut self, x: &Test) {
    self.0[26..26 + 4].copy_from_slice(&x.0)
  }

  // Decode all fields into the owned object-API representation.
  pub fn unpack(&self) -> Vec3T {
    Vec3T {
      x: self.x(),
      y: self.y(),
      z: self.z(),
      test1: self.test1(),
      test2: self.test2(),
      test3: self.test3().unpack(),
    }
  }
}
241 
// Owned, unpacked ("object API") representation of `Vec3`; converted to
// and from the packed form via `Vec3::unpack` and `Vec3T::pack`.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct Vec3T {
  pub x: f32,
  pub y: f32,
  pub z: f32,
  pub test1: f64,
  pub test2: Color,
  pub test3: TestT,
}
251 impl Vec3T {
pack(&self) -> Vec3252   pub fn pack(&self) -> Vec3 {
253     Vec3::new(
254       self.x,
255       self.y,
256       self.z,
257       self.test1,
258       self.test2,
259       &self.test3.pack(),
260     )
261   }
262 }
263 
264