// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct ArrayStruct, aligned to 8
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct ArrayStruct(pub [u8; 160]);
impl Default for ArrayStruct {
  fn default() -> Self {
    Self([0; 160])
  }
}
impl core::fmt::Debug for ArrayStruct {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("ArrayStruct")
      .field("a", &self.a())
      .field("b", &self.b())
      .field("c", &self.c())
      .field("d", &self.d())
      .field("e", &self.e())
      .field("f", &self.f())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for ArrayStruct {}
impl<'a> flatbuffers::Follow<'a> for ArrayStruct {
  type Inner = &'a ArrayStruct;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a ArrayStruct>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a ArrayStruct {
  type Inner = &'a ArrayStruct;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<ArrayStruct>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for ArrayStruct {
  type Output = ArrayStruct;
  #[inline]
  unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
    let src = ::core::slice::from_raw_parts(self as *const ArrayStruct as *const u8, Self::size());
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for ArrayStruct {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> ArrayStruct {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    a: f32,
    b: &[i32; 15],
    c: i8,
    d: &[NestedStruct; 2],
    e: i32,
    f: &[i64; 2],
  ) -> Self {
    let mut s = Self([0; 160]);
    s.set_a(a);
    s.set_b(b);
    s.set_c(c);
    s.set_d(d);
    s.set_e(e);
    s.set_f(f);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.ArrayStruct"
  }

  pub fn a(&self) -> f32 {
    let mut mem = core::mem::MaybeUninit::<<f32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_a(&mut self, x: f32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<f32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn b(&'a self) -> flatbuffers::Array<'a, i32, 15> {
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid array in this slot
    unsafe { flatbuffers::Array::follow(&self.0, 4) }
  }

  pub fn set_b(&mut self, items: &[i32; 15]) {
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid array in this slot
    unsafe { flatbuffers::emplace_scalar_array(&mut self.0, 4, items) };
  }

  pub fn c(&self) -> i8 {
    let mut mem = core::mem::MaybeUninit::<<i8 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[64..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<i8 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_c(&mut self, x: i8) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[64..].as_mut_ptr(),
        core::mem::size_of::<<i8 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn d(&'a self) -> flatbuffers::Array<'a, NestedStruct, 2> {
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid array in this slot
    unsafe { flatbuffers::Array::follow(&self.0, 72) }
  }

  pub fn set_d(&mut self, x: &[NestedStruct; 2]) {
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid array in this slot
    unsafe {
      core::ptr::copy(
        x.as_ptr() as *const u8,
        self.0.as_mut_ptr().add(72),
        64,
      );
    }
  }

  pub fn e(&self) -> i32 {
    let mut mem = core::mem::MaybeUninit::<<i32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[136..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<i32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_e(&mut self, x: i32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[136..].as_mut_ptr(),
        core::mem::size_of::<<i32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn f(&'a self) -> flatbuffers::Array<'a, i64, 2> {
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid array in this slot
    unsafe { flatbuffers::Array::follow(&self.0, 144) }
  }

  pub fn set_f(&mut self, items: &[i64; 2]) {
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid array in this slot
    unsafe { flatbuffers::emplace_scalar_array(&mut self.0, 144, items) };
  }

  pub fn unpack(&self) -> ArrayStructT {
    ArrayStructT {
      a: self.a(),
      b: self.b().into(),
      c: self.c(),
      d: { let d = self.d(); flatbuffers::array_init(|i| d.get(i).unpack()) },
      e: self.e(),
      f: self.f().into(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct ArrayStructT {
  pub a: f32,
  pub b: [i32; 15],
  pub c: i8,
  pub d: [NestedStructT; 2],
  pub e: i32,
  pub f: [i64; 2],
}
impl ArrayStructT {
  pub fn pack(&self) -> ArrayStruct {
    ArrayStruct::new(
      self.a,
      &self.b,
      self.c,
      &flatbuffers::array_init(|i| self.d[i].pack()),
      self.e,
      &self.f,
    )
  }
}
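
// Editorial note: the block below is a minimal usage sketch, not part of the
// flatc-generated output. It only exercises the object-API round trip defined
// above (ArrayStructT::pack / ArrayStruct::unpack) and assumes the companion
// generated type `NestedStructT` (brought in via `use super::*;`) implements
// `Default`, which the `#[derive(Default)]` on `ArrayStructT` already requires.
#[cfg(test)]
mod array_struct_usage_sketch {
  use super::*;

  #[test]
  fn pack_unpack_round_trip() {
    // Build the owned ("T") form, letting the nested structs and the
    // fixed-size arrays take their default values.
    let native = ArrayStructT {
      a: 1.5,
      c: -7,
      e: 42,
      f: [100, 200],
      ..Default::default()
    };
    // pack() writes the fields into the fixed 160-byte struct layout.
    let packed: ArrayStruct = native.pack();
    assert_eq!(packed.a(), 1.5);
    assert_eq!(packed.c(), -7);
    assert_eq!(packed.e(), 42);
    // unpack() recovers an owned value equal to the one we started from.
    assert_eq!(packed.unpack(), native);
  }
}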