// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Ability, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct Ability(pub [u8; 8]);
impl Default for Ability {
  fn default() -> Self {
    Self([0; 8])
  }
}
impl core::fmt::Debug for Ability {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Ability")
      .field("id", &self.id())
      .field("distance", &self.distance())
      .finish()
  }
}

impl flatbuffers::SimpleToVerifyInSlice for Ability {}
impl flatbuffers::SafeSliceAccess for Ability {}
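// Reading is zero-copy: `Follow` resolves an `Ability` stored in a buffer by
// casting the bytes at `loc` to a `&Ability` via `follow_cast_ref`.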
impl<'a> flatbuffers::Follow<'a> for Ability {
  type Inner = &'a Ability;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Ability>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Ability {
  type Inner = &'a Ability;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Ability>(buf, loc)
  }
}
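// Writing: `Push` serializes an `Ability` by copying its 8 raw bytes into the
// destination; the bytes are already stored little-endian, so no conversion
// is needed here.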
impl<'b> flatbuffers::Push for Ability {
    type Output = Ability;
    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        let src = unsafe {
            ::core::slice::from_raw_parts(self as *const Ability as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}
impl<'b> flatbuffers::Push for &'b Ability {
    type Output = Ability;

    #[inline]
    fn push(&self, dst: &mut [u8], _rest: &[u8]) {
        let src = unsafe {
            ::core::slice::from_raw_parts(*self as *const Ability as *const u8, Self::size())
        };
        dst.copy_from_slice(src);
    }
}

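// Verification only needs to check that the fixed-size struct lies entirely
// inside the buffer (`in_buffer`); every byte pattern is a valid `u32`, so the
// field contents themselves need no further checks.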
impl<'a> flatbuffers::Verifiable for Ability {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}

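// Accessors read and write the fields in place: `id` occupies bytes 0..4 and
// `distance` bytes 4..8, both little-endian.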
impl<'a> Ability {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    id: u32,
    distance: u32,
  ) -> Self {
    let mut s = Self([0; 8]);
    s.set_id(id);
    s.set_distance(distance);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "MyGame.Example.Ability"
  }

  pub fn id(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<u32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<u32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_id(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const u32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<u32>(),
      );
    }
  }

  #[inline]
  pub fn key_compare_less_than(&self, o: &Ability) -> bool {
    self.id() < o.id()
  }

  #[inline]
  pub fn key_compare_with_value(&self, val: u32) -> ::core::cmp::Ordering {
    let key = self.id();
    key.cmp(&val)
  }
  pub fn distance(&self) -> u32 {
    let mut mem = core::mem::MaybeUninit::<u32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[4..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<u32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_distance(&mut self, x: u32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const u32 as *const u8,
        self.0[4..].as_mut_ptr(),
        core::mem::size_of::<u32>(),
      );
    }
  }

  pub fn unpack(&self) -> AbilityT {
    AbilityT {
      id: self.id(),
      distance: self.distance(),
    }
  }
}

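// Object API: `AbilityT` is the owned, mutable counterpart of `Ability`;
// convert between the two with `Ability::unpack` and `AbilityT::pack`.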
#[derive(Debug, Clone, PartialEq, Default)]
pub struct AbilityT {
  pub id: u32,
  pub distance: u32,
}
impl AbilityT {
  pub fn pack(&self) -> Ability {
    Ability::new(
      self.id,
      self.distance,
    )
  }
}
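
// Usage sketch (illustrative; not emitted by flatc): a minimal round trip
// through the generated API above, assuming this module compiles as generated
// inside the MyGame.Example crate. The module and test names are hypothetical.
#[cfg(test)]
mod ability_usage_sketch {
  use super::*;

  #[test]
  fn round_trip() {
    // Struct form: 8 bytes, fields stored little-endian at offsets 0 and 4.
    let a = Ability::new(3, 250);
    assert_eq!(a.id(), 3);
    assert_eq!(a.distance(), 250);

    // Object-API form: plain owned struct, convenient to mutate.
    let mut t: AbilityT = a.unpack();
    t.distance += 10;

    // Pack back into the fixed-layout struct.
    let b: Ability = t.pack();
    assert_eq!(b.distance(), 260);

    // Key comparison is generated over the `id` field.
    assert!(a.key_compare_less_than(&Ability::new(4, 0)));
  }
}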