use std::cell::UnsafeCell;
use std::fmt;
use std::ops::Deref;

/// `AtomicU16` providing an additional `unsync_load` function.
pub(crate) struct AtomicU16 {
    inner: UnsafeCell<std::sync::atomic::AtomicU16>,
}

unsafe impl Send for AtomicU16 {}
unsafe impl Sync for AtomicU16 {}

impl AtomicU16 {
    pub(crate) const fn new(val: u16) -> AtomicU16 {
        let inner = UnsafeCell::new(std::sync::atomic::AtomicU16::new(val));
        AtomicU16 { inner }
    }

    /// Performs an unsynchronized load.
    ///
    /// # Safety
    ///
    /// All mutations must have happened before the unsynchronized load.
    /// Additionally, there must be no concurrent mutations.
    pub(crate) unsafe fn unsync_load(&self) -> u16 {
        *(*self.inner.get()).get_mut()
    }
}

impl Deref for AtomicU16 {
    type Target = std::sync::atomic::AtomicU16;

    fn deref(&self) -> &Self::Target {
        // safety: it is always safe to access `&self` fns on the inner value as
        // we never perform unsafe mutations.
        unsafe { &*self.inner.get() }
    }
}

impl fmt::Debug for AtomicU16 {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.deref().fmt(fmt)
    }
}
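
// Illustrative sketch (not part of the original file): a minimal test showing how
// `unsync_load` might be exercised alongside the `Deref`-exposed atomic API. The
// module name and scenario are assumptions. The call is sound here because the
// safety contract is satisfied: the store happens-before the load on the same
// thread, and no concurrent mutations exist in a single-threaded test.
#[cfg(test)]
mod unsync_load_sketch {
    use super::AtomicU16;
    use std::sync::atomic::Ordering;

    #[test]
    fn unsync_load_sees_prior_store() {
        let a = AtomicU16::new(7);

        // Synchronized store through `Deref` to the inner `std` atomic.
        a.store(42, Ordering::Relaxed);

        // Safety: the store above happened before this load on the same thread,
        // and no other thread mutates `a` concurrently.
        let v = unsafe { a.unsync_load() };
        assert_eq!(v, 42);
    }
}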