// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Ability, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Ability(pub [u8; 8]);
impl Default for Ability {
  fn default() -> Self {
    Self([0; 8])
  }
}
impl core::fmt::Debug for Ability {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Ability")
      .field("id", &self.id())
      .field("distance", &self.distance())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Ability {}
impl<'a> flatbuffers::Follow<'a> for Ability {
  type Inner = &'a Ability;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Ability>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Ability {
  type Inner = &'a Ability;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Ability>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Ability {
  type Output = Ability;
  #[inline]
  unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
    let src = ::core::slice::from_raw_parts(self as *const Ability as *const u8, Self::size());
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for Ability {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> Ability {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    id: u32,
    distance: u32,
  ) -> Self {
    let mut s = Self([0; 8]);
    s.set_id(id);
    s.set_distance(distance);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.Ability"
  }

  pub fn id(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<<u32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_id(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
    }
  }

  #[inline]
  pub fn key_compare_less_than(&self, o: &Ability) -> bool {
    self.id() < o.id()
  }

  #[inline]
  pub fn key_compare_with_value(&self, val: u32) -> ::core::cmp::Ordering {
    let key = self.id();
    key.cmp(&val)
  }

  pub fn distance(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<<u32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_distance(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn unpack(&self) -> AbilityT {
    AbilityT {
      id: self.id(),
      distance: self.distance(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct AbilityT {
  pub id: u32,
  pub distance: u32,
}
impl AbilityT {
  pub fn pack(&self) -> Ability {
    Ability::new(
      self.id,
      self.distance,
    )
  }
}
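
// Illustrative usage sketch, not part of the compiler's generated output: it
// shows how the accessors above and the object-API pair `Ability`/`AbilityT`
// round-trip a value. The module and test names are hypothetical, and the
// sketch only exercises functions defined in this file (`new`, `id`,
// `distance`, `unpack`, `pack`).
#[cfg(test)]
mod ability_usage_sketch {
  use super::*;

  #[test]
  fn pack_unpack_round_trip() {
    // Construct the fixed-size struct directly; fields are stored little-endian
    // in the backing `[u8; 8]`.
    let a = Ability::new(7, 25);
    assert_eq!(a.id(), 7);
    assert_eq!(a.distance(), 25);

    // Unpack into the owned object-API type, then pack it back into the
    // wire-compatible struct.
    let t: AbilityT = a.unpack();
    assert_eq!(t, AbilityT { id: 7, distance: 25 });
    assert_eq!(t.pack(), a);
  }
}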