// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Vec3, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Vec3(pub [u8; 12]);
impl Default for Vec3 {
  fn default() -> Self {
    Self([0; 12])
  }
}
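// A Vec3 occupies 12 bytes: three little-endian f32 fields at byte offsets
// 0 (x), 4 (y) and 8 (z). The #[repr(transparent)] wrapper over [u8; 12]
// keeps the in-memory layout identical to the FlatBuffers wire layout.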
impl core::fmt::Debug for Vec3 {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Vec3")
      .field("x", &self.x())
      .field("y", &self.y())
      .field("z", &self.z())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Vec3 {}
impl flatbuffers::SafeSliceAccess for Vec3 {}
impl<'a> flatbuffers::Follow<'a> for Vec3 {
  type Inner = &'a Vec3;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Vec3>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
  type Inner = &'a Vec3;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
  }
}
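// Reading is zero-copy: following a Vec3 at `loc` simply reinterprets the
// 12 bytes at that buffer offset as a `&Vec3` via `follow_cast_ref`.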
impl<'b> flatbuffers::Push for Vec3 {
    type Output = Vec3;
    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        let src = unsafe {
            ::core::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}
impl<'b> flatbuffers::Push for &'b Vec3 {
    type Output = Vec3;

    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        let src = unsafe {
            ::core::slice::from_raw_parts(*self as *const Vec3 as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}
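// Writing is the mirror image: pushing a Vec3 memcpys its 12 bytes into the
// destination, since the in-memory bytes are already the wire representation.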

impl<'a> flatbuffers::Verifiable for Vec3 {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}
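// Verification of a fixed-size struct only needs to check that its 12-byte
// range lies inside the buffer: every bit pattern is a valid Vec3.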

impl<'a> Vec3 {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    x: f32,
    y: f32,
    z: f32,
  ) -> Self {
    let mut s = Self([0; 12]);
    s.set_x(x);
    s.set_y(y);
    s.set_z(z);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Sample.Vec3"
  }

  pub fn x(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_x(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }
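  // Each getter copies its four field bytes into a MaybeUninit<f32> with
  // copy_nonoverlapping (sidestepping a possibly unaligned f32 read) and then
  // converts from little-endian; each setter performs the inverse copy. y()
  // and z() below repeat the pattern at offsets 4 and 8.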

  pub fn y(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_y(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  pub fn z(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<f32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[8..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<f32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_z(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const f32 as *const u8,
        self.0[8..].as_mut_ptr(),
        core::mem::size_of::<f32>(),
      );
    }
  }

  pub fn unpack(&self) -> Vec3T {
    Vec3T {
      x: self.x(),
      y: self.y(),
      z: self.z(),
    }
  }
}
174 
175 #[derive(Debug, Clone, PartialEq, Default)]
176 pub struct Vec3T {
177   pub x: f32,
178   pub y: f32,
179   pub z: f32,
180 }
181 impl Vec3T {
pack(&self) -> Vec3182   pub fn pack(&self) -> Vec3 {
183     Vec3::new(
184       self.x,
185       self.y,
186       self.z,
187     )
188   }
189 }
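
// Illustrative usage sketch (not part of the flatc-generated output): a
// minimal round-trip check of the accessors and the Vec3 <-> Vec3T
// object-API conversion, assuming this file is compiled as part of the
// usual generated MyGame.Sample module. The module and test names below
// are hypothetical.
#[cfg(test)]
mod vec3_roundtrip_sketch {
  use super::*;

  #[test]
  fn accessors_and_object_api_roundtrip() {
    // Field values are written through the little-endian setters...
    let v = Vec3::new(1.0, 2.0, 3.0);
    assert_eq!(v.x(), 1.0);
    assert_eq!(v.y(), 2.0);
    assert_eq!(v.z(), 3.0);

    // ...unpack() copies them into the owned object-API type...
    let t: Vec3T = v.unpack();
    assert_eq!(t, Vec3T { x: 1.0, y: 2.0, z: 3.0 });

    // ...and pack() rebuilds the byte-oriented struct, bit for bit.
    assert_eq!(t.pack(), v);
  }
}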