// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
extern crate serde;
use self::serde::ser::{Serialize, Serializer, SerializeStruct};
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Ability, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Ability(pub [u8; 8]);
impl Default for Ability {
  fn default() -> Self {
    Self([0; 8])
  }
}
impl core::fmt::Debug for Ability {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Ability")
      .field("id", &self.id())
      .field("distance", &self.distance())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Ability {}
impl<'a> flatbuffers::Follow<'a> for Ability {
  type Inner = &'a Ability;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Ability>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Ability {
  type Inner = &'a Ability;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Ability>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Ability {
  type Output = Ability;
  #[inline]
  unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
    let src = ::core::slice::from_raw_parts(self as *const Ability as *const u8, Self::size());
    dst.copy_from_slice(src);
  }
}

impl<'a> flatbuffers::Verifiable for Ability {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl Serialize for Ability {
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    let mut s = serializer.serialize_struct("Ability", 2)?;
    s.serialize_field("id", &self.id())?;
    s.serialize_field("distance", &self.distance())?;
    s.end()
  }
}

impl<'a> Ability {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    id: u32,
    distance: u32,
  ) -> Self {
    let mut s = Self([0; 8]);
    s.set_id(id);
    s.set_distance(distance);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.Ability"
  }

  pub fn id(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<<u32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_id(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
    }
  }

  #[inline]
  pub fn key_compare_less_than(&self, o: &Ability) -> bool {
    self.id() < o.id()
  }

  #[inline]
  pub fn key_compare_with_value(&self, val: u32) -> ::core::cmp::Ordering {
    let key = self.id();
    key.cmp(&val)
  }

  pub fn distance(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<<u32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_distance(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn unpack(&self) -> AbilityT {
    AbilityT {
      id: self.id(),
      distance: self.distance(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct AbilityT {
  pub id: u32,
  pub distance: u32,
}
impl AbilityT {
  pub fn pack(&self) -> Ability {
    Ability::new(
      self.id,
      self.distance,
    )
  }
}
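// Illustrative usage sketch, not part of the flatc-generated output above: it shows how the
// `Ability` fixed-layout struct and its object-API counterpart `AbilityT` round-trip through
// `unpack`/`pack`. The module name, test name, and field values are arbitrary examples.
#[cfg(test)]
mod ability_usage_sketch {
  use super::*;

  #[test]
  fn roundtrip_through_object_api() {
    // `new` writes both u32 fields into the 8-byte backing array in little-endian order;
    // the accessors read them back regardless of host endianness.
    let ability = Ability::new(7, 120);
    assert_eq!(ability.id(), 7);
    assert_eq!(ability.distance(), 120);

    // `unpack` copies the fields into the owned object-API type...
    let owned: AbilityT = ability.unpack();
    assert_eq!(owned, AbilityT { id: 7, distance: 120 });

    // ...and `pack` rebuilds a byte-for-byte identical fixed-layout struct.
    assert_eq!(owned.pack(), ability);
  }
}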