// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Object, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub(crate) struct Object(pub [u8; 4]);
impl Default for Object {
  fn default() -> Self {
    Self([0; 4])
  }
}
impl core::fmt::Debug for Object {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Object")
      .field("value", &self.value())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Object {}
impl<'a> flatbuffers::Follow<'a> for Object {
  type Inner = &'a Object;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Object>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Object {
  type Inner = &'a Object;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Object>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Object {
  type Output = Object;
  #[inline]
  unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
    let src = ::core::slice::from_raw_parts(self as *const Object as *const u8, Self::size());
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for Object {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> Object {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    value: i32,
  ) -> Self {
    let mut s = Self([0; 4]);
    s.set_value(value);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "Object"
  }

  pub fn value(&self) -> i32 {
    let mut mem = core::mem::MaybeUninit::<<i32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<i32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_value(&mut self, x: i32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<i32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn unpack(&self) -> ObjectT {
    ObjectT {
      value: self.value(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub(crate) struct ObjectT {
  pub value: i32,
}
impl ObjectT {
  pub fn pack(&self) -> Object {
    Object::new(
      self.value,
    )
  }
}
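
// A minimal usage sketch, not part of the flatc-generated output above: it
// assumes this module compiles against the `flatbuffers` crate and only
// exercises the API defined in this file. It shows the round trip between the
// fixed-size struct `Object` and its native "object API" counterpart `ObjectT`.
// The module and test names are illustrative, not generated identifiers.
#[cfg(test)]
mod object_roundtrip_sketch {
  use super::*;

  #[test]
  fn packs_and_unpacks_value() {
    // Build the 4-byte packed struct directly from an i32 and read it back.
    let packed = Object::new(42);
    assert_eq!(packed.value(), 42);

    // Convert to the owned ObjectT and pack it again; the bytes should match.
    let native: ObjectT = packed.unpack();
    let repacked = native.pack();
    assert_eq!(packed, repacked);
  }
}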