1 #![allow(
2 dead_code,
3 non_snake_case,
4 non_camel_case_types,
5 non_upper_case_globals
6 )]
7
#[repr(C)]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
/// Backing storage for C bitfields, as emitted by bindgen. `Storage` is a
/// fixed-size byte array sized to hold the packed bitfield unit.
pub struct __BindgenBitfieldUnit<Storage> {
    storage: Storage,
}
impl<Storage> __BindgenBitfieldUnit<Storage> {
    /// Wraps raw storage bytes in a bitfield unit.
    #[inline]
    pub const fn new(storage: Storage) -> Self {
        Self { storage }
    }
}
impl<Storage> __BindgenBitfieldUnit<Storage>
where
    Storage: AsRef<[u8]> + AsMut<[u8]>,
{
    /// Reads the single bit at `index` (counted across the whole unit).
    #[inline]
    pub fn get_bit(&self, index: usize) -> bool {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let bit_in_byte = index % 8;
        // Bit order within a byte is mirrored on big-endian targets.
        let shift = if cfg!(target_endian = "big") {
            7 - bit_in_byte
        } else {
            bit_in_byte
        };
        (self.storage.as_ref()[index / 8] >> shift) & 1 != 0
    }
    /// Writes the single bit at `index` to `val`.
    #[inline]
    pub fn set_bit(&mut self, index: usize, val: bool) {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let bit_in_byte = index % 8;
        // Bit order within a byte is mirrored on big-endian targets.
        let shift = if cfg!(target_endian = "big") {
            7 - bit_in_byte
        } else {
            bit_in_byte
        };
        let mask: u8 = 1 << shift;
        let byte = &mut self.storage.as_mut()[index / 8];
        if val {
            *byte |= mask;
        } else {
            *byte &= !mask;
        }
    }
    /// Extracts a `bit_width`-bit field starting at `bit_offset`, returned
    /// right-aligned in a `u64`.
    #[inline]
    pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!(
            (bit_offset + (bit_width as usize)) / 8 <=
                self.storage.as_ref().len()
        );
        let width = bit_width as usize;
        (0..width).fold(0u64, |acc, i| {
            if !self.get_bit(bit_offset + i) {
                return acc;
            }
            // On big-endian targets the field's bit significance is reversed.
            let dst = if cfg!(target_endian = "big") {
                width - 1 - i
            } else {
                i
            };
            acc | (1u64 << dst)
        })
    }
    /// Stores the low `bit_width` bits of `val` into the field starting at
    /// `bit_offset`.
    #[inline]
    pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!(
            (bit_offset + (bit_width as usize)) / 8 <=
                self.storage.as_ref().len()
        );
        let width = bit_width as usize;
        for i in 0..width {
            let bit = (val >> i) & 1 != 0;
            // On big-endian targets the field's bit significance is reversed.
            let dst = if cfg!(target_endian = "big") {
                width - 1 - i
            } else {
                i
            };
            self.set_bit(bit_offset + dst, bit);
        }
    }
}
#[repr(C)]
#[derive(Default)]
/// Zero-sized stand-in for a C flexible array member (`T buf[]` at the end
/// of a struct). `PhantomData<T>` keeps ownership/variance over `T` without
/// storing any elements.
pub struct __IncompleteArrayField<T>(::std::marker::PhantomData<T>, [T; 0]);
impl<T> __IncompleteArrayField<T> {
    /// Creates the (zero-sized) field marker.
    #[inline]
    pub const fn new() -> Self {
        __IncompleteArrayField(::std::marker::PhantomData, [])
    }
    /// Pointer to the first trailing element (the field's own address).
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        (self as *const Self).cast()
    }
    /// Mutable pointer to the first trailing element.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        (self as *mut Self).cast()
    }
    /// Views the trailing storage as a slice of `len` elements.
    ///
    /// # Safety
    /// The caller must guarantee `len` initialized `T`s actually follow this
    /// field in memory for the returned borrow's lifetime.
    #[inline]
    pub unsafe fn as_slice(&self, len: usize) -> &[T] {
        ::std::slice::from_raw_parts(self.as_ptr(), len)
    }
    /// Mutable counterpart of [`Self::as_slice`].
    ///
    /// # Safety
    /// Same contract as `as_slice`, plus exclusive access to the storage.
    #[inline]
    pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] {
        ::std::slice::from_raw_parts_mut(self.as_mut_ptr(), len)
    }
}
impl<T> ::std::fmt::Debug for __IncompleteArrayField<T> {
    /// Prints only the type name; the trailing elements are not reachable
    /// safely from here.
    fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        f.write_str("__IncompleteArrayField")
    }
}
/// Ring buffer descriptor generated by bindgen from DPDK's C
/// `struct rte_kni_fifo`. Field meanings below come from the original C
/// trailing doc comments.
/// NOTE(review): index wrap-around semantics are not visible here — confirm
/// against the DPDK KNI headers before relying on them.
#[repr(C)]
#[derive(Debug)]
pub struct rte_kni_fifo {
    ///< Next position to be written
    pub write: ::std::os::raw::c_uint,
    ///< Next position to be read
    pub read: ::std::os::raw::c_uint,
    ///< Circular buffer length
    pub len: ::std::os::raw::c_uint,
    ///< Pointer size - for 32/64 bit OS
    pub elem_size: ::std::os::raw::c_uint,
    ///< The buffer contains mbuf pointers
    // C flexible array member: the pointers are stored inline after the
    // header fields; `__IncompleteArrayField` itself occupies no space.
    pub buffer: __IncompleteArrayField<*mut ::std::os::raw::c_void>,
}
/// Layout check emitted by bindgen: the Rust `rte_kni_fifo` must match the
/// C struct's size, alignment, and per-field offsets.
#[test]
fn bindgen_test_layout_rte_kni_fifo() {
    // Uninitialized storage is fine here: only field *addresses* are
    // inspected, never field values.
    const UNINIT: ::std::mem::MaybeUninit<rte_kni_fifo> =
        ::std::mem::MaybeUninit::uninit();
    let base = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<rte_kni_fifo>(),
        16usize,
        concat!("Size of: ", stringify!(rte_kni_fifo))
    );
    assert_eq!(
        ::std::mem::align_of::<rte_kni_fifo>(),
        8usize,
        concat!("Alignment of ", stringify!(rte_kni_fifo))
    );
    // `addr_of!` projects to each field without materializing a reference,
    // which would be UB on uninitialized memory.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*base).write) as usize } - base as usize,
        0usize,
        concat!("Offset of field: ", stringify!(rte_kni_fifo), "::", stringify!(write))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*base).read) as usize } - base as usize,
        4usize,
        concat!("Offset of field: ", stringify!(rte_kni_fifo), "::", stringify!(read))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*base).len) as usize } - base as usize,
        8usize,
        concat!("Offset of field: ", stringify!(rte_kni_fifo), "::", stringify!(len))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*base).elem_size) as usize } - base as usize,
        12usize,
        concat!("Offset of field: ", stringify!(rte_kni_fifo), "::", stringify!(elem_size))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*base).buffer) as usize } - base as usize,
        16usize,
        concat!("Offset of field: ", stringify!(rte_kni_fifo), "::", stringify!(buffer))
    );
}
206 impl Default for rte_kni_fifo {
default() -> Self207 fn default() -> Self {
208 let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
209 unsafe {
210 ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
211 s.assume_init()
212 }
213 }
214 }
/// Ethernet port link status, generated by bindgen from DPDK's C
/// `struct rte_eth_link`. The three 1-bit C bitfields (`link_duplex`,
/// `link_autoneg`, `link_status`) are packed into `_bitfield_1` and read or
/// written through the accessor methods on `impl rte_eth_link` below.
#[repr(C)]
#[repr(align(8))]
#[derive(Debug, Default, Copy, Clone)]
pub struct rte_eth_link {
    ///< ETH_SPEED_NUM_
    pub link_speed: u32,
    // Zero-sized marker bindgen emits to pin the bitfield unit's alignment.
    pub _bitfield_align_1: [u8; 0],
    // One byte of packed storage for the struct's C bitfields.
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    // Explicit tail padding so the Rust size matches the 8-byte C layout.
    pub __bindgen_padding_0: [u8; 3usize],
}
/// Layout check emitted by bindgen: the Rust `rte_eth_link` must match the
/// C struct's size, alignment, and field offsets.
#[test]
fn bindgen_test_layout_rte_eth_link() {
    // Only field *addresses* are read, so uninitialized storage is fine.
    const UNINIT: ::std::mem::MaybeUninit<rte_eth_link> =
        ::std::mem::MaybeUninit::uninit();
    let base = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<rte_eth_link>(),
        8usize,
        concat!("Size of: ", stringify!(rte_eth_link))
    );
    assert_eq!(
        ::std::mem::align_of::<rte_eth_link>(),
        8usize,
        concat!("Alignment of ", stringify!(rte_eth_link))
    );
    // `addr_of!` avoids creating a reference to uninitialized memory.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*base).link_speed) as usize } - base as usize,
        0usize,
        concat!("Offset of field: ", stringify!(rte_eth_link), "::", stringify!(link_speed))
    );
}
253 impl rte_eth_link {
254 #[inline]
link_duplex(&self) -> u16255 pub fn link_duplex(&self) -> u16 {
256 unsafe {
257 ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u16)
258 }
259 }
260 #[inline]
set_link_duplex(&mut self, val: u16)261 pub fn set_link_duplex(&mut self, val: u16) {
262 unsafe {
263 let val: u16 = ::std::mem::transmute(val);
264 self._bitfield_1.set(0usize, 1u8, val as u64)
265 }
266 }
267 #[inline]
link_autoneg(&self) -> u16268 pub fn link_autoneg(&self) -> u16 {
269 unsafe {
270 ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u16)
271 }
272 }
273 #[inline]
set_link_autoneg(&mut self, val: u16)274 pub fn set_link_autoneg(&mut self, val: u16) {
275 unsafe {
276 let val: u16 = ::std::mem::transmute(val);
277 self._bitfield_1.set(1usize, 1u8, val as u64)
278 }
279 }
280 #[inline]
link_status(&self) -> u16281 pub fn link_status(&self) -> u16 {
282 unsafe {
283 ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u16)
284 }
285 }
286 #[inline]
set_link_status(&mut self, val: u16)287 pub fn set_link_status(&mut self, val: u16) {
288 unsafe {
289 let val: u16 = ::std::mem::transmute(val);
290 self._bitfield_1.set(2usize, 1u8, val as u64)
291 }
292 }
293 #[inline]
new_bitfield_1( link_duplex: u16, link_autoneg: u16, link_status: u16, ) -> __BindgenBitfieldUnit<[u8; 1usize]>294 pub fn new_bitfield_1(
295 link_duplex: u16,
296 link_autoneg: u16,
297 link_status: u16,
298 ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
299 let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> =
300 Default::default();
301 __bindgen_bitfield_unit.set(0usize, 1u8, {
302 let link_duplex: u16 =
303 unsafe { ::std::mem::transmute(link_duplex) };
304 link_duplex as u64
305 });
306 __bindgen_bitfield_unit.set(1usize, 1u8, {
307 let link_autoneg: u16 =
308 unsafe { ::std::mem::transmute(link_autoneg) };
309 link_autoneg as u64
310 });
311 __bindgen_bitfield_unit.set(2usize, 1u8, {
312 let link_status: u16 =
313 unsafe { ::std::mem::transmute(link_status) };
314 link_status as u64
315 });
316 __bindgen_bitfield_unit
317 }
318 }
319