1 use std::{
2 cell::UnsafeCell,
3 panic::{RefUnwindSafe, UnwindSafe},
4 sync::atomic::{AtomicU8, Ordering},
5 };
6
pub(crate) struct OnceCell<T> {
    /// One of `INCOMPLETE` / `RUNNING` / `COMPLETE`. Its address also
    /// serves as the key threads park on while waiting for initialization.
    state: AtomicU8,
    /// The slot itself; written at most once, by the thread that holds
    /// the `RUNNING` state.
    value: UnsafeCell<Option<T>>,
}
11
// Values of `OnceCell::state`.
/// No value stored and no initializer running.
const INCOMPLETE: u8 = 0x0;
/// Some thread is currently running the initialization closure;
/// other threads park until the state changes.
const RUNNING: u8 = 0x1;
/// The value is stored; observing this state with an `Acquire` load
/// makes reading the slot sound.
const COMPLETE: u8 = 0x2;
15
// Why do we need `T: Send`?
// Thread A creates a `OnceCell` and shares it with
// scoped thread B, which fills the cell, which is
// then destroyed by A. That is, destructor observes
// a sent value.
// `T: Sync` is additionally required because a shared `&OnceCell<T>`
// hands out `&T` (via `get_unchecked`) to multiple threads at once.
unsafe impl<T: Sync + Send> Sync for OnceCell<T> {}
unsafe impl<T: Send> Send for OnceCell<T> {}

// A panic inside the init closure leaves the cell empty (the `Guard` in
// `initialize_inner` rolls the state back to `INCOMPLETE`), so an observer
// after a caught unwind never sees a half-written value.
impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceCell<T> {}
impl<T: UnwindSafe> UnwindSafe for OnceCell<T> {}
26
impl<T> OnceCell<T> {
    /// Creates a new, empty cell (state `INCOMPLETE`, no value stored).
    pub(crate) const fn new() -> OnceCell<T> {
        OnceCell { state: AtomicU8::new(INCOMPLETE), value: UnsafeCell::new(None) }
    }

    /// Creates a cell that already holds `value`; the state starts out as
    /// `COMPLETE`, so readers never need to initialize or wait.
    pub(crate) const fn with_value(value: T) -> OnceCell<T> {
        OnceCell { state: AtomicU8::new(COMPLETE), value: UnsafeCell::new(Some(value)) }
    }

    /// Returns `true` once a value has been stored and published.
    ///
    /// Safety: synchronizes with store to value via Release/Acquire.
    #[inline]
    pub(crate) fn is_initialized(&self) -> bool {
        self.state.load(Ordering::Acquire) == COMPLETE
    }

    /// Runs `f` to initialize the cell unless another thread has already
    /// done (or is concurrently doing) so. Returns `Ok(())` if the cell
    /// ends up initialized by anyone, or the `Err` produced by *this*
    /// thread's own `f`.
    ///
    /// Safety: synchronizes with store to value via `is_initialized` or mutex
    /// lock/unlock, writes value only once because of the mutex.
    #[cold]
    pub(crate) fn initialize<F, E>(&self, f: F) -> Result<(), E>
    where
        F: FnOnce() -> Result<T, E>,
    {
        // `f` (a `FnOnce`) and the error are threaded through the monomorphic
        // `&mut dyn FnMut() -> bool` below via captured locals; `Option::take`
        // lets the `FnOnce` be consumed from inside a `FnMut`.
        let mut f = Some(f);
        let mut res: Result<(), E> = Ok(());
        let slot: *mut Option<T> = self.value.get();
        initialize_inner(&self.state, &mut || {
            // We are calling user-supplied function and need to be careful.
            // - if it returns Err, we unlock mutex and return without touching anything
            // - if it panics, we unlock mutex and propagate panic without touching anything
            // - if it calls `set` or `get_or_try_init` re-entrantly, we get a deadlock on
            //   mutex, which is important for safety. We *could* detect this and panic,
            //   but that is more complicated
            // - finally, if it returns Ok, we store the value and store the flag with
            //   `Release`, which synchronizes with `Acquire`s.
            let f = unsafe { crate::unwrap_unchecked(f.take()) };
            match f() {
                Ok(value) => unsafe {
                    // Safe b/c we have a unique access and no panic may happen
                    // until the cell is marked as initialized.
                    debug_assert!((*slot).is_none());
                    *slot = Some(value);
                    // `true` tells `initialize_inner` to publish COMPLETE.
                    true
                },
                Err(err) => {
                    res = Err(err);
                    false
                }
            }
        });
        res
    }

    /// Parks the current thread until `state` changes.
    ///
    /// Returns immediately if the cell is already `COMPLETE` (the validation
    /// closure fails, so we never go to sleep). Otherwise parks until
    /// `Guard::drop` unparks the waiters. NOTE(review): an unpark also
    /// happens when initialization *fails* (state goes back to
    /// `INCOMPLETE`), so callers must re-check `is_initialized` after this
    /// returns rather than assume completion.
    #[cold]
    pub(crate) fn wait(&self) {
        let key = &self.state as *const _ as usize;
        unsafe {
            parking_lot_core::park(
                key,
                || self.state.load(Ordering::Acquire) != COMPLETE,
                || (),
                |_, _| (),
                parking_lot_core::DEFAULT_PARK_TOKEN,
                None, // no timeout
            );
        }
    }

    /// Get the reference to the underlying value, without checking if the cell
    /// is initialized.
    ///
    /// # Safety
    ///
    /// Caller must ensure that the cell is in initialized state, and that
    /// the contents are acquired by (synchronized to) this thread.
    pub(crate) unsafe fn get_unchecked(&self) -> &T {
        debug_assert!(self.is_initialized());
        let slot = &*self.value.get();
        crate::unwrap_unchecked(slot.as_ref())
    }

    /// Gets the mutable reference to the underlying value.
    /// Returns `None` if the cell is empty.
    pub(crate) fn get_mut(&mut self) -> Option<&mut T> {
        // Safe b/c we have an exclusive access
        let slot: &mut Option<T> = unsafe { &mut *self.value.get() };
        slot.as_mut()
    }

    /// Consumes this `OnceCell`, returning the wrapped value.
    /// Returns `None` if the cell was empty.
    pub(crate) fn into_inner(self) -> Option<T> {
        self.value.into_inner()
    }
}
121
/// Drop guard held by `initialize_inner` while the state is `RUNNING`.
///
/// On drop — normal exit *or* unwind out of the init closure — it publishes
/// `new_state` and wakes every thread parked on the state word.
struct Guard<'a> {
    state: &'a AtomicU8,
    /// State to publish on drop: stays `INCOMPLETE` unless initialization
    /// succeeded, in which case the caller flips it to `COMPLETE`.
    new_state: u8,
}
126
impl<'a> Drop for Guard<'a> {
    fn drop(&mut self) {
        // `Release` publishes the freshly written value (if any) to the
        // `Acquire` loads in `is_initialized` / `initialize_inner` / `wait`.
        // The store must happen before the unpark so woken threads observe
        // the new state.
        self.state.store(self.new_state, Ordering::Release);
        unsafe {
            // Wake everyone parked on this cell: both `wait`ers and the
            // losers of the `initialize_inner` race. Unparking with no
            // waiters queued is harmless.
            let key = self.state as *const AtomicU8 as usize;
            parking_lot_core::unpark_all(key, parking_lot_core::DEFAULT_UNPARK_TOKEN);
        }
    }
}
136
// Note: this is intentionally monomorphic
#[inline(never)]
fn initialize_inner(state: &AtomicU8, init: &mut dyn FnMut() -> bool) {
    loop {
        let exchange =
            state.compare_exchange_weak(INCOMPLETE, RUNNING, Ordering::Acquire, Ordering::Acquire);
        match exchange {
            Ok(_) => {
                // We won the race and hold the RUNNING "lock". The guard
                // restores INCOMPLETE (and wakes waiters) even if `init`
                // panics; on success it publishes COMPLETE with `Release`.
                let mut guard = Guard { state, new_state: INCOMPLETE };
                if init() {
                    guard.new_state = COMPLETE;
                }
                return;
            }
            // Someone else already finished initialization.
            Err(COMPLETE) => return,
            Err(RUNNING) => unsafe {
                // Another thread is initializing: park until it transitions
                // the state. The validation closure is re-run by
                // parking_lot_core under its queue lock, so an unpark between
                // our load above and actually sleeping cannot be lost;
                // `Relaxed` suffices there because this load only decides
                // whether to sleep — the value is read only after an
                // `Acquire` load elsewhere.
                let key = state as *const AtomicU8 as usize;
                parking_lot_core::park(
                    key,
                    || state.load(Ordering::Relaxed) == RUNNING,
                    || (),
                    |_, _| (),
                    parking_lot_core::DEFAULT_PARK_TOKEN,
                    None,
                );
            },
            // `compare_exchange_weak` may fail spuriously even when the
            // current value *is* INCOMPLETE — just retry the loop.
            Err(INCOMPLETE) => (),
            // The state byte only ever holds one of the three constants.
            Err(_) => debug_assert!(false),
        }
    }
}
168
#[test]
fn test_size() {
    use std::mem::size_of;

    // `state` is one `u8`, and `UnsafeCell<Option<bool>>` occupies a single
    // byte (`Option<bool>` uses a niche of `bool`), so the whole cell is two
    // bytes. The pointless `1 *` factor (clippy: `identity_op`) is removed.
    assert_eq!(size_of::<OnceCell<bool>>(), size_of::<bool>() + size_of::<u8>());
}
175