use crate::sync::batch_semaphore::Semaphore;
use crate::sync::rwlock::read_guard::RwLockReadGuard;
use crate::sync::rwlock::write_guard_mapped::RwLockMappedWriteGuard;
use std::fmt;
use std::marker;
use std::mem;
use std::ops;

/// RAII structure used to release the exclusive write access of a lock when
/// dropped.
///
/// This structure is created by the [`write`] method
/// on [`RwLock`].
///
/// [`write`]: method@crate::sync::RwLock::write
/// [`RwLock`]: struct@crate::sync::RwLock
#[must_use = "if unused the RwLock will immediately unlock"]
pub struct RwLockWriteGuard<'a, T: ?Sized> {
    #[cfg(all(tokio_unstable, feature = "tracing"))]
    pub(super) resource_span: tracing::Span,
    pub(super) permits_acquired: u32,
    pub(super) s: &'a Semaphore,
    pub(super) data: *mut T,
    pub(super) marker: marker::PhantomData<&'a mut T>,
}

impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> {
    /// Makes a new [`RwLockMappedWriteGuard`] for a component of the locked data.
    ///
    /// This operation cannot fail as the `RwLockWriteGuard` passed in already
    /// locked the data.
    ///
    /// This is an associated function that needs to be used as
    /// `RwLockWriteGuard::map(..)`. A method would interfere with methods of
    /// the same name on the contents of the locked data.
    ///
    /// This is an asynchronous version of [`RwLockWriteGuard::map`] from the
    /// [`parking_lot` crate].
    ///
    /// [`RwLockMappedWriteGuard`]: struct@crate::sync::RwLockMappedWriteGuard
    /// [`RwLockWriteGuard::map`]: https://docs.rs/lock_api/latest/lock_api/struct.RwLockWriteGuard.html#method.map
    /// [`parking_lot` crate]: https://crates.io/crates/parking_lot
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio::sync::{RwLock, RwLockWriteGuard};
    ///
    /// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    /// struct Foo(u32);
    ///
    /// # #[tokio::main]
    /// # async fn main() {
    /// let lock = RwLock::new(Foo(1));
    ///
    /// {
    ///     let mut mapped = RwLockWriteGuard::map(lock.write().await, |f| &mut f.0);
    ///     *mapped = 2;
    /// }
    ///
    /// assert_eq!(Foo(2), *lock.read().await);
    /// # }
    /// ```
    #[inline]
    pub fn map<F, U: ?Sized>(mut this: Self, f: F) -> RwLockMappedWriteGuard<'a, U>
    where
        F: FnOnce(&mut T) -> &mut U,
    {
        let data = f(&mut *this) as *mut U;
        let s = this.s;
        let permits_acquired = this.permits_acquired;
        #[cfg(all(tokio_unstable, feature = "tracing"))]
        let resource_span = this.resource_span.clone();
        // NB: Forget to avoid drop impl from being called.
        mem::forget(this);
        RwLockMappedWriteGuard {
            permits_acquired,
            s,
            data,
            marker: marker::PhantomData,
            #[cfg(all(tokio_unstable, feature = "tracing"))]
            resource_span,
        }
    }

    /// Attempts to make a new [`RwLockMappedWriteGuard`] for a component of
    /// the locked data. The original guard is returned if the closure returns
    /// `None`.
    ///
    /// This operation cannot fail as the `RwLockWriteGuard` passed in already
    /// locked the data.
    ///
    /// This is an associated function that needs to be
    /// used as `RwLockWriteGuard::try_map(...)`. A method would interfere with
    /// methods of the same name on the contents of the locked data.
    ///
    /// This is an asynchronous version of [`RwLockWriteGuard::try_map`] from
    /// the [`parking_lot` crate].
    ///
    /// [`RwLockMappedWriteGuard`]: struct@crate::sync::RwLockMappedWriteGuard
    /// [`RwLockWriteGuard::try_map`]: https://docs.rs/lock_api/latest/lock_api/struct.RwLockWriteGuard.html#method.try_map
    /// [`parking_lot` crate]: https://crates.io/crates/parking_lot
    ///
    /// # Examples
    ///
    /// ```
    /// use tokio::sync::{RwLock, RwLockWriteGuard};
    ///
    /// #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    /// struct Foo(u32);
    ///
    /// # #[tokio::main]
    /// # async fn main() {
    /// let lock = RwLock::new(Foo(1));
    ///
    /// {
    ///     let guard = lock.write().await;
    ///     let mut guard = RwLockWriteGuard::try_map(guard, |f| Some(&mut f.0)).expect("should not fail");
    ///     *guard = 2;
    /// }
    ///
    /// assert_eq!(Foo(2), *lock.read().await);
    /// # }
    /// ```
    #[inline]
    pub fn try_map<F, U: ?Sized>(
        mut this: Self,
        f: F,
    ) -> Result<RwLockMappedWriteGuard<'a, U>, Self>
    where
        F: FnOnce(&mut T) -> Option<&mut U>,
    {
        let data = match f(&mut *this) {
            Some(data) => data as *mut U,
            None => return Err(this),
        };
        let s = this.s;
        let permits_acquired = this.permits_acquired;
        #[cfg(all(tokio_unstable, feature = "tracing"))]
        let resource_span = this.resource_span.clone();
        // NB: Forget to avoid drop impl from being called.
        mem::forget(this);
        Ok(RwLockMappedWriteGuard {
            permits_acquired,
            s,
            data,
            marker: marker::PhantomData,
            #[cfg(all(tokio_unstable, feature = "tracing"))]
            resource_span,
        })
    }

    /// Converts this `RwLockWriteGuard` into an `RwLockMappedWriteGuard`. This
    /// method can be used to store a non-mapped guard in a struct field that
    /// expects a mapped guard.
    ///
    /// This is equivalent to calling `RwLockWriteGuard::map(guard, |me| me)`.
    #[inline]
    pub fn into_mapped(this: Self) -> RwLockMappedWriteGuard<'a, T> {
        RwLockWriteGuard::map(this, |me| me)
    }

    /// Atomically downgrades a write lock into a read lock without allowing
    /// any writers to take exclusive access of the lock in the meantime.
    ///
    /// **Note:** This won't *necessarily* allow any additional readers to acquire
    /// locks, since [`RwLock`] is fair and it is possible that a writer is next
    /// in line.
    ///
    /// Returns an RAII guard which will drop this read access of the `RwLock`
    /// when dropped.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tokio::sync::RwLock;
    /// # use std::sync::Arc;
    /// #
    /// # #[tokio::main]
    /// # async fn main() {
    /// let lock = Arc::new(RwLock::new(1));
    ///
    /// let n = lock.write().await;
    ///
    /// let cloned_lock = lock.clone();
    /// let handle = tokio::spawn(async move {
    ///     *cloned_lock.write().await = 2;
    /// });
    ///
    /// let n = n.downgrade();
    /// assert_eq!(*n, 1, "downgrade is atomic");
    ///
    /// drop(n);
    /// handle.await.unwrap();
    /// assert_eq!(*lock.read().await, 2, "second writer obtained write lock");
    /// # }
    /// ```
    ///
    /// [`RwLock`]: struct@crate::sync::RwLock
    pub fn downgrade(self) -> RwLockReadGuard<'a, T> {
        let RwLockWriteGuard { s, data, .. } = self;
        let to_release = (self.permits_acquired - 1) as usize;
        // Release all but one of the permits held by the write guard
        s.release(to_release);

        #[cfg(all(tokio_unstable, feature = "tracing"))]
        self.resource_span.in_scope(|| {
            tracing::trace!(
                target: "runtime::resource::state_update",
                write_locked = false,
                write_locked.op = "override",
            )
        });

        #[cfg(all(tokio_unstable, feature = "tracing"))]
        self.resource_span.in_scope(|| {
            tracing::trace!(
                target: "runtime::resource::state_update",
                current_readers = 1,
                current_readers.op = "add",
            )
        });

        #[cfg(all(tokio_unstable, feature = "tracing"))]
        let resource_span = self.resource_span.clone();
        // NB: Forget to avoid drop impl from being called.
        mem::forget(self);

        RwLockReadGuard {
            s,
            data,
            marker: marker::PhantomData,
            #[cfg(all(tokio_unstable, feature = "tracing"))]
            resource_span,
        }
    }
}

impl<T: ?Sized> ops::Deref for RwLockWriteGuard<'_, T> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { &*self.data }
    }
}

impl<T: ?Sized> ops::DerefMut for RwLockWriteGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.data }
    }
}

impl<'a, T: ?Sized> fmt::Debug for RwLockWriteGuard<'a, T>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<'a, T: ?Sized> fmt::Display for RwLockWriteGuard<'a, T>
where
    T: fmt::Display,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<'a, T: ?Sized> Drop for RwLockWriteGuard<'a, T> {
    fn drop(&mut self) {
        self.s.release(self.permits_acquired as usize);

        #[cfg(all(tokio_unstable, feature = "tracing"))]
        self.resource_span.in_scope(|| {
            tracing::trace!(
                target: "runtime::resource::state_update",
                write_locked = false,
                write_locked.op = "override",
            )
        });
    }
}
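
// A minimal downstream usage sketch (not part of this module; `Slot` and
// `hold_whole_value` are hypothetical, caller-defined names) illustrating the
// `into_mapped` doc comment above: a field typed as `RwLockMappedWriteGuard`
// can also store an unmapped write guard once it has been converted.
//
//     use tokio::sync::{RwLock, RwLockMappedWriteGuard, RwLockWriteGuard};
//
//     struct Slot<'a> {
//         // The field expects a mapped guard, so both mapped and unmapped
//         // guards can be stored here after conversion.
//         guard: RwLockMappedWriteGuard<'a, u32>,
//     }
//
//     async fn hold_whole_value(lock: &RwLock<u32>) -> Slot<'_> {
//         // No projection is needed; the write guard is converted in place,
//         // which is equivalent to `RwLockWriteGuard::map(guard, |me| me)`.
//         Slot {
//             guard: RwLockWriteGuard::into_mapped(lock.write().await),
//         }
//     }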