#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
#[repr(C)]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct __BindgenBitfieldUnit<Storage> {
    storage: Storage,
}
impl<Storage> __BindgenBitfieldUnit<Storage> {
    #[inline]
    pub const fn new(storage: Storage) -> Self {
        Self { storage }
    }
}
impl<Storage> __BindgenBitfieldUnit<Storage>
where
    Storage: AsRef<[u8]> + AsMut<[u8]>,
{
    #[inline]
    pub fn get_bit(&self, index: usize) -> bool {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte_index = index / 8;
        let byte = self.storage.as_ref()[byte_index];
        let bit_index = if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        };
        let mask = 1 << bit_index;
        byte & mask == mask
    }
    #[inline]
    pub fn set_bit(&mut self, index: usize, val: bool) {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte_index = index / 8;
        let byte = &mut self.storage.as_mut()[byte_index];
        let bit_index = if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        };
        let mask = 1 << bit_index;
        if val {
            *byte |= mask;
        } else {
            *byte &= !mask;
        }
    }
    #[inline]
    pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!(
            (bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len(),
        );
        let mut val = 0;
        for i in 0..(bit_width as usize) {
            if self.get_bit(i + bit_offset) {
                let index = if cfg!(target_endian = "big") {
                    bit_width as usize - 1 - i
                } else {
                    i
                };
                val |= 1 << index;
            }
        }
        val
    }
    #[inline]
    pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!(
            (bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len(),
        );
        for i in 0..(bit_width as usize) {
            let mask = 1 << i;
            let val_bit_is_set = val & mask == mask;
            let index = if cfg!(target_endian = "big") {
                bit_width as usize - 1 - i
            } else {
                i
            };
            self.set_bit(index + bit_offset, val_bit_is_set);
        }
    }
}
#[repr(C)]
#[derive(Default)]
pub struct __IncompleteArrayField<T>(::std::marker::PhantomData<T>, [T; 0]);
impl<T> __IncompleteArrayField<T> {
    #[inline]
    pub const fn new() -> Self {
        __IncompleteArrayField(::std::marker::PhantomData, [])
    }
    #[inline]
    pub fn as_ptr(&self) -> *const T {
        self as *const _ as *const T
    }
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self as *mut _ as *mut T
    }
    #[inline]
    pub unsafe fn as_slice(&self, len: usize) -> &[T] {
        ::std::slice::from_raw_parts(self.as_ptr(), len)
    }
    #[inline]
    pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] {
        ::std::slice::from_raw_parts_mut(self.as_mut_ptr(), len)
    }
}
impl<T> ::std::fmt::Debug for __IncompleteArrayField<T> {
    fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
        fmt.write_str("__IncompleteArrayField")
    }
}
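// A minimal sketch, not part of the generated bindings: the test module
// below (name `bitfield_unit_tests` is ours) exercises the bit-level
// accessors above on a two-byte storage unit, making the offset/width
// semantics of `get`/`set` visible. The cfg!(target_endian) branches in
// the accessors keep these assertions valid on both endiannesses.
#[cfg(test)]
mod bitfield_unit_tests {
    use super::*;

    #[test]
    fn get_set_round_trip() {
        let mut unit = __BindgenBitfieldUnit::new([0u8; 2]);
        // Write a 3-bit value at bit offset 6, spanning the byte boundary.
        unit.set(6, 3, 0b101);
        assert_eq!(unit.get(6, 3), 0b101);
        // Individual logical bits agree with the packed value, LSB first.
        assert!(unit.get_bit(6));
        assert!(!unit.get_bit(7));
        assert!(unit.get_bit(8));
        // Clearing the field leaves the unit fully zeroed again.
        unit.set(6, 3, 0);
        assert_eq!(unit.get(0, 16), 0);
    }
}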
#[repr(C)]
#[derive(Debug)]
pub struct rte_kni_fifo {
    ///< Next position to be written
    pub write: ::std::os::raw::c_uint,
    ///< Next position to be read
    pub read: ::std::os::raw::c_uint,
    ///< Circular buffer length
    pub len: ::std::os::raw::c_uint,
    ///< Pointer size - for 32/64 bit OS
    pub elem_size: ::std::os::raw::c_uint,
    ///< The buffer contains mbuf pointers
    pub buffer: __IncompleteArrayField<*mut ::std::os::raw::c_void>,
}
#[allow(clippy::unnecessary_operation, clippy::identity_op)]
const _: () = {
    ["Size of rte_kni_fifo"][::std::mem::size_of::<rte_kni_fifo>() - 16usize];
    ["Alignment of rte_kni_fifo"][::std::mem::align_of::<rte_kni_fifo>() - 8usize];
    ["Offset of field: rte_kni_fifo::write"][::std::mem::offset_of!(rte_kni_fifo, write) - 0usize];
    ["Offset of field: rte_kni_fifo::read"][::std::mem::offset_of!(rte_kni_fifo, read) - 4usize];
    ["Offset of field: rte_kni_fifo::len"][::std::mem::offset_of!(rte_kni_fifo, len) - 8usize];
    ["Offset of field: rte_kni_fifo::elem_size"][::std::mem::offset_of!(rte_kni_fifo, elem_size) - 12usize];
    ["Offset of field: rte_kni_fifo::buffer"][::std::mem::offset_of!(rte_kni_fifo, buffer) - 16usize];
};
impl Default for rte_kni_fifo {
    fn default() -> Self {
        let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
        unsafe {
            ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
            s.assume_init()
        }
    }
}
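// A minimal sketch, not part of the generated bindings: shows how the
// flexible-array-member pattern behind `__IncompleteArrayField` is
// typically used. In real use DPDK allocates the fifo with trailing
// pointer slots; the manual allocation here (and the module name,
// `SLOTS` count, etc.) is our stand-in for that memory.
#[cfg(test)]
mod fifo_layout_tests {
    use super::*;

    #[test]
    fn buffer_views_trailing_storage() {
        use std::alloc::{alloc_zeroed, dealloc, Layout};
        const SLOTS: usize = 4;
        // Header plus SLOTS trailing `*mut c_void` entries, laid out as
        // the C side would allocate `struct rte_kni_fifo`.
        let layout = Layout::from_size_align(
            std::mem::size_of::<rte_kni_fifo>()
                + SLOTS * std::mem::size_of::<*mut std::os::raw::c_void>(),
            std::mem::align_of::<rte_kni_fifo>(),
        )
        .unwrap();
        unsafe {
            let p = alloc_zeroed(layout) as *mut rte_kni_fifo;
            assert!(!p.is_null());
            (*p).len = SLOTS as ::std::os::raw::c_uint;
            // `buffer` is a zero-sized field at the end of the struct, so
            // `as_slice` views exactly the trailing storage we allocated.
            let slots = (*p).buffer.as_slice(SLOTS);
            assert!(slots.iter().all(|s| s.is_null()));
            dealloc(p as *mut u8, layout);
        }
    }
}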
#[repr(C)]
#[repr(align(8))]
#[derive(Debug, Default, Copy, Clone)]
pub struct rte_eth_link {
    ///< ETH_SPEED_NUM_
    pub link_speed: u32,
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    pub __bindgen_padding_0: [u8; 3usize],
}
#[allow(clippy::unnecessary_operation, clippy::identity_op)]
const _: () = {
    ["Size of rte_eth_link"][::std::mem::size_of::<rte_eth_link>() - 8usize];
    ["Alignment of rte_eth_link"][::std::mem::align_of::<rte_eth_link>() - 8usize];
    ["Offset of field: rte_eth_link::link_speed"][::std::mem::offset_of!(rte_eth_link, link_speed) - 0usize];
};
impl rte_eth_link {
    #[inline]
    pub fn link_duplex(&self) -> u16 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u16) }
    }
    #[inline]
    pub fn set_link_duplex(&mut self, val: u16) {
        unsafe {
            let val: u16 = ::std::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn link_autoneg(&self) -> u16 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u16) }
    }
    #[inline]
    pub fn set_link_autoneg(&mut self, val: u16) {
        unsafe {
            let val: u16 = ::std::mem::transmute(val);
            self._bitfield_1.set(1usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn link_status(&self) -> u16 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u16) }
    }
    #[inline]
    pub fn set_link_status(&mut self, val: u16) {
        unsafe {
            let val: u16 = ::std::mem::transmute(val);
            self._bitfield_1.set(2usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        link_duplex: u16,
        link_autoneg: u16,
        link_status: u16,
    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let link_duplex: u16 = unsafe { ::std::mem::transmute(link_duplex) };
            link_duplex as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let link_autoneg: u16 = unsafe { ::std::mem::transmute(link_autoneg) };
            link_autoneg as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let link_status: u16 = unsafe { ::std::mem::transmute(link_status) };
            link_status as u64
        });
        __bindgen_bitfield_unit
    }
}
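// A minimal sketch, not part of the generated bindings: round-trips the
// three single-bit `rte_eth_link` fields through the generated accessors
// and checks that `new_bitfield_1` packs them identically. The module
// name and the particular 1/0 values are our choices for illustration.
#[cfg(test)]
mod link_bitfield_tests {
    use super::*;

    #[test]
    fn bitfield_round_trip() {
        let mut link = rte_eth_link::default();
        link.set_link_duplex(1);
        link.set_link_autoneg(0);
        link.set_link_status(1);
        assert_eq!(link.link_duplex(), 1);
        assert_eq!(link.link_autoneg(), 0);
        assert_eq!(link.link_status(), 1);
        // Constructing the packed unit directly agrees with the setters:
        // bits 0, 1, 2 hold duplex, autoneg, and status respectively.
        let unit = rte_eth_link::new_bitfield_1(1, 0, 1);
        assert_eq!(unit.get(0, 1), 1);
        assert_eq!(unit.get(1, 1), 0);
        assert_eq!(unit.get(2, 1), 1);
    }
}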