use crate::sync::rwlock::owned_read_guard::OwnedRwLockReadGuard;
use crate::sync::rwlock::owned_write_guard_mapped::OwnedRwLockMappedWriteGuard;
use crate::sync::rwlock::RwLock;
use std::fmt;
use std::marker::PhantomData;
use std::mem::{self, ManuallyDrop};
use std::ops;
use std::sync::Arc;

/// Owned RAII structure used to release the exclusive write access of a lock when
/// dropped.
///
/// This structure is created by the [`write_owned`] method
/// on [`RwLock`].
///
/// [`write_owned`]: method@crate::sync::RwLock::write_owned
/// [`RwLock`]: struct@crate::sync::RwLock
pub struct OwnedRwLockWriteGuard<T: ?Sized> {
    #[cfg(all(tokio_unstable, feature = "tracing"))]
    pub(super) resource_span: tracing::Span,
    pub(super) permits_acquired: u32,
    // ManuallyDrop allows us to destructure into this field without running the destructor.
    pub(super) lock: ManuallyDrop<Arc<RwLock<T>>>,
    pub(super) data: *mut T,
    pub(super) _p: PhantomData<T>,
}

impl<T: ?Sized> OwnedRwLockWriteGuard<T> {
    /// Makes a new [`OwnedRwLockMappedWriteGuard`] for a component of the locked
    /// data.
    ///
    /// This operation cannot fail as the `OwnedRwLockWriteGuard` passed in
    /// already locked the data.
    ///
    /// This is an associated function that needs to be used as
    /// `OwnedRwLockWriteGuard::map(..)`. A method would interfere with methods
    /// of the same name on the contents of the locked data.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    /// use tokio::sync::{RwLock, OwnedRwLockWriteGuard};
    ///
    /// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    /// struct Foo(u32);
    ///
    /// # #[tokio::main]
    /// # async fn main() {
    /// let lock = Arc::new(RwLock::new(Foo(1)));
    ///
    /// {
    ///     let lock = Arc::clone(&lock);
    ///     let mut mapped = OwnedRwLockWriteGuard::map(lock.write_owned().await, |f| &mut f.0);
    ///     *mapped = 2;
    /// }
    ///
    /// assert_eq!(Foo(2), *lock.read().await);
    /// # }
    /// ```
    #[inline]
    pub fn map<F, U: ?Sized>(mut this: Self, f: F) -> OwnedRwLockMappedWriteGuard<T, U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        let data = f(&mut *this) as *mut U;
        let lock = unsafe { ManuallyDrop::take(&mut this.lock) };
        let permits_acquired = this.permits_acquired;
        #[cfg(all(tokio_unstable, feature = "tracing"))]
        let resource_span = this.resource_span.clone();
        // NB: Forget to avoid drop impl from being called.
        mem::forget(this);

        OwnedRwLockMappedWriteGuard {
            permits_acquired,
            lock: ManuallyDrop::new(lock),
            data,
            _p: PhantomData,
            #[cfg(all(tokio_unstable, feature = "tracing"))]
            resource_span,
        }
    }

    /// Attempts to make a new [`OwnedRwLockMappedWriteGuard`] for a component
    /// of the locked data. The original guard is returned if the closure
    /// returns `None`.
    ///
    /// This operation cannot fail as the `OwnedRwLockWriteGuard` passed in
    /// already locked the data.
    ///
    /// This is an associated function that needs to be
    /// used as `OwnedRwLockWriteGuard::try_map(...)`. A method would interfere
    /// with methods of the same name on the contents of the locked data.
    ///
    /// [`RwLockMappedWriteGuard`]: struct@crate::sync::RwLockMappedWriteGuard
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    /// use tokio::sync::{RwLock, OwnedRwLockWriteGuard};
    ///
    /// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    /// struct Foo(u32);
    ///
    /// # #[tokio::main]
    /// # async fn main() {
    /// let lock = Arc::new(RwLock::new(Foo(1)));
    ///
    /// {
    ///     let guard = Arc::clone(&lock).write_owned().await;
    ///     let mut guard = OwnedRwLockWriteGuard::try_map(guard, |f| Some(&mut f.0)).expect("should not fail");
    ///     *guard = 2;
    /// }
    ///
    /// assert_eq!(Foo(2), *lock.read().await);
    /// # }
    /// ```
    #[inline]
    pub fn try_map<F, U: ?Sized>(
        mut this: Self,
        f: F,
    ) -> Result<OwnedRwLockMappedWriteGuard<T, U>, Self>
    where
        F: FnOnce(&mut T) -> Option<&mut U>,
    {
        let data = match f(&mut *this) {
            Some(data) => data as *mut U,
            None => return Err(this),
        };
        let permits_acquired = this.permits_acquired;
        let lock = unsafe { ManuallyDrop::take(&mut this.lock) };
        #[cfg(all(tokio_unstable, feature = "tracing"))]
        let resource_span = this.resource_span.clone();

        // NB: Forget to avoid drop impl from being called.
        mem::forget(this);

        Ok(OwnedRwLockMappedWriteGuard {
            permits_acquired,
            lock: ManuallyDrop::new(lock),
            data,
            _p: PhantomData,
            #[cfg(all(tokio_unstable, feature = "tracing"))]
            resource_span,
        })
    }

    /// Converts this `OwnedRwLockWriteGuard` into an
    /// `OwnedRwLockMappedWriteGuard`. This method can be used to store a
    /// non-mapped guard in a struct field that expects a mapped guard.
    ///
    /// This is equivalent to calling `OwnedRwLockWriteGuard::map(guard, |me| me)`.
    #[inline]
    pub fn into_mapped(this: Self) -> OwnedRwLockMappedWriteGuard<T> {
        Self::map(this, |me| me)
    }

    /// Atomically downgrades a write lock into a read lock without allowing
    /// any writers to take exclusive access of the lock in the meantime.
    ///
    /// **Note:** This won't *necessarily* allow any additional readers to acquire
    /// locks, since [`RwLock`] is fair and it is possible that a writer is next
    /// in line.
    ///
    /// Returns an RAII guard which will drop this read access of the `RwLock`
    /// when dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tokio::sync::RwLock;
    /// # use std::sync::Arc;
    /// #
    /// # #[tokio::main]
    /// # async fn main() {
    /// let lock = Arc::new(RwLock::new(1));
    ///
    /// let n = lock.clone().write_owned().await;
    ///
    /// let cloned_lock = lock.clone();
    /// let handle = tokio::spawn(async move {
    ///     *cloned_lock.write_owned().await = 2;
    /// });
    ///
    /// let n = n.downgrade();
    /// assert_eq!(*n, 1, "downgrade is atomic");
    ///
    /// drop(n);
    /// handle.await.unwrap();
    /// assert_eq!(*lock.read().await, 2, "second writer obtained write lock");
    /// # }
    /// ```
    pub fn downgrade(mut self) -> OwnedRwLockReadGuard<T> {
        let lock = unsafe { ManuallyDrop::take(&mut self.lock) };
        let data = self.data;
        let to_release = (self.permits_acquired - 1) as usize;

        // Release all but one of the permits held by the write guard
        lock.s.release(to_release);
        #[cfg(all(tokio_unstable, feature = "tracing"))]
        self.resource_span.in_scope(|| {
            tracing::trace!(
                target: "runtime::resource::state_update",
                write_locked = false,
                write_locked.op = "override",
            )
        });

        #[cfg(all(tokio_unstable, feature = "tracing"))]
        self.resource_span.in_scope(|| {
            tracing::trace!(
                target: "runtime::resource::state_update",
                current_readers = 1,
                current_readers.op = "add",
            )
        });

        #[cfg(all(tokio_unstable, feature = "tracing"))]
        let resource_span = self.resource_span.clone();
        // NB: Forget to avoid drop impl from being called.
        mem::forget(self);

        OwnedRwLockReadGuard {
            lock: ManuallyDrop::new(lock),
            data,
            _p: PhantomData,
            #[cfg(all(tokio_unstable, feature = "tracing"))]
            resource_span,
        }
    }
}

impl<T: ?Sized> ops::Deref for OwnedRwLockWriteGuard<T> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { &*self.data }
    }
}

impl<T: ?Sized> ops::DerefMut for OwnedRwLockWriteGuard<T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.data }
    }
}

impl<T: ?Sized> fmt::Debug for OwnedRwLockWriteGuard<T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T: ?Sized> fmt::Display for OwnedRwLockWriteGuard<T>
where
    T: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T: ?Sized> Drop for OwnedRwLockWriteGuard<T> {
    fn drop(&mut self) {
        self.lock.s.release(self.permits_acquired as usize);
        #[cfg(all(tokio_unstable, feature = "tracing"))]
        self.resource_span.in_scope(|| {
            tracing::trace!(
                target: "runtime::resource::state_update",
                write_locked = false,
                write_locked.op = "override",
            )
        });
        unsafe { ManuallyDrop::drop(&mut self.lock) };
    }
}
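
// A minimal sketch, not part of the upstream module, showing how the guard
// combinators above compose: `into_mapped` (which has no doc example) and
// `downgrade`. It assumes the crate is compiled with the `sync` and `rt`
// features so that a current-thread runtime can drive the futures in a
// plain `#[test]`; the `Foo` type and test name are illustrative only.
#[cfg(all(test, feature = "rt"))]
mod owned_write_guard_sketch {
    use super::*;
    use crate::runtime::Builder;

    #[derive(Debug, PartialEq)]
    struct Foo(u32);

    #[test]
    fn into_mapped_and_downgrade() {
        let rt = Builder::new_current_thread().build().unwrap();
        rt.block_on(async {
            let lock = Arc::new(RwLock::new(Foo(1)));

            // `into_mapped` turns a plain write guard into a mapped guard
            // without narrowing the borrow; handy when a struct field is
            // typed as `OwnedRwLockMappedWriteGuard<Foo>`.
            let mut mapped = OwnedRwLockWriteGuard::into_mapped(lock.clone().write_owned().await);
            mapped.0 = 2;
            drop(mapped);

            // `downgrade` atomically trades exclusive access for shared
            // access, so the read guard observes the value written above.
            let read = lock.clone().write_owned().await.downgrade();
            assert_eq!(*read, Foo(2));
        });
    }
}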