// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Ability, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Ability(pub [u8; 8]);
impl Default for Ability {
  fn default() -> Self {
    Self([0; 8])
  }
}
impl core::fmt::Debug for Ability {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Ability")
      .field("id", &self.id())
      .field("distance", &self.distance())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Ability {}
impl<'a> flatbuffers::Follow<'a> for Ability {
  type Inner = &'a Ability;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Ability>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Ability {
  type Inner = &'a Ability;
  #[inline]
  unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Ability>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Ability {
    type Output = Ability;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        let src = ::core::slice::from_raw_parts(self as *const Ability as *const u8, <Self as flatbuffers::Push>::size());
        dst.copy_from_slice(src);
    }
    #[inline]
    fn alignment() -> flatbuffers::PushAlignment {
        flatbuffers::PushAlignment::new(4)
    }
}

impl<'a> flatbuffers::Verifiable for Ability {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

impl<'a> Ability {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    id: u32,
    distance: u32,
  ) -> Self {
    let mut s = Self([0; 8]);
    s.set_id(id);
    s.set_distance(distance);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.Ability"
  }

  pub fn id(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<<u32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_id(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
    }
  }

  #[inline]
  pub fn key_compare_less_than(&self, o: &Ability) -> bool {
    self.id() < o.id()
  }

  #[inline]
  pub fn key_compare_with_value(&self, val: u32) -> ::core::cmp::Ordering {
    let key = self.id();
    key.cmp(&val)
  }
  pub fn distance(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<<u32 as EndianScalar>::Scalar>::uninit();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    EndianScalar::from_little_endian(unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
      mem.assume_init()
    })
  }

  pub fn set_distance(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    // Safety:
    // Created from a valid Table for this object
    // Which contains a valid value in this slot
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const _ as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<<u32 as EndianScalar>::Scalar>(),
      );
    }
  }

  pub fn unpack(&self) -> AbilityT {
    AbilityT {
      id: self.id(),
      distance: self.distance(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub struct AbilityT {
  pub id: u32,
  pub distance: u32,
}
impl AbilityT {
  pub fn pack(&self) -> Ability {
    Ability::new(
      self.id,
      self.distance,
    )
  }
}
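
// ---------------------------------------------------------------------------
// Illustrative usage sketch: NOT emitted by flatc, added here only to show how
// the generated fixed-size struct `Ability` and its object-API counterpart
// `AbilityT` relate. The test-module name and the field values are assumptions
// for demonstration; everything it calls is defined above in this file.
// ---------------------------------------------------------------------------
#[cfg(test)]
mod ability_usage_sketch {
  use super::*;

  #[test]
  fn round_trips_between_ability_and_ability_t() {
    // Construct the 8-byte struct directly; both u32 fields are stored little-endian.
    let a = Ability::new(7, 120);
    assert_eq!(a.id(), 7);
    assert_eq!(a.distance(), 120);

    // Unpack into the owned object-API type, then pack back into struct form.
    let owned: AbilityT = a.unpack();
    assert_eq!(owned.id, 7);
    let packed: Ability = owned.pack();
    assert_eq!(a, packed);
  }
}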