// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Vec3, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Vec3(pub [u8; 12]);
impl Default for Vec3 {
  fn default() -> Self {
    Self([0; 12])
  }
}
impl core::fmt::Debug for Vec3 {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Vec3")
      .field("x", &self.x())
      .field("y", &self.y())
      .field("z", &self.z())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Vec3 {}
impl<'a> flatbuffers::Follow<'a> for Vec3 {
  type Inner = &'a Vec3;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Vec3>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Vec3 {
  type Inner = &'a Vec3;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Vec3>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Vec3 {
  type Output = Vec3;
  #[inline]
  unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
    let src = ::core::slice::from_raw_parts(self as *const Vec3 as *const u8, Self::size());
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for Vec3 {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> Vec3 {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    x: f32,
    y: f32,
    z: f32,
  ) -> Self {
    let mut s = Self([0; 12]);
    s.set_x(x);
    s.set_y(y);
    s.set_z(z);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Sample.Vec3"
  }

  pub fn x(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<<f32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_x(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn y(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<<f32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_y(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn z(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<<f32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[8..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_z(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[8..].as_mut_ptr(),
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn unpack(&self) -> Vec3T {
    Vec3T {
      x: self.x(),
      y: self.y(),
      z: self.z(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct Vec3T {
  pub x: f32,
  pub y: f32,
  pub z: f32,
}
impl Vec3T {
  pub fn pack(&self) -> Vec3 {
    Vec3::new(
      self.x,
      self.y,
      self.z,
    )
  }
}
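
// Usage sketch (hand-written, not part of the flatc output): round-trips a
// Vec3 through its object-API counterpart Vec3T and checks the little-endian
// layout of the 12-byte backing array. The module and test names below are
// illustrative, not generated identifiers.
#[cfg(test)]
mod vec3_usage_sketch {
  use super::*;

  #[test]
  fn pack_unpack_round_trip() {
    // new() writes each field little-endian at byte offsets 0, 4, and 8.
    let v = Vec3::new(1.0, 2.0, 3.0);
    assert_eq!(v.x(), 1.0);
    assert_eq!(v.0[0..4], 1.0f32.to_le_bytes());
    assert_eq!(v.0[8..12], 3.0f32.to_le_bytes());

    // unpack() copies into the owned object-API struct; pack() inverts it.
    let t: Vec3T = v.unpack();
    assert_eq!(t, Vec3T { x: 1.0, y: 2.0, z: 3.0 });
    assert_eq!(t.pack(), v);
  }
}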