/*
 * Copyright (c) 2024 Google Inc. All rights reserved
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

use alloc::boxed::Box;

use core::ffi::c_void;
use core::ptr::null_mut;

pub use crate::sys::handle_close;
pub use crate::sys::handle_decref;
pub use crate::sys::handle_wait;

pub use crate::sys::IPC_HANDLE_POLL_ERROR;
pub use crate::sys::IPC_HANDLE_POLL_HUP;
pub use crate::sys::IPC_HANDLE_POLL_MSG;
pub use crate::sys::IPC_HANDLE_POLL_NONE;
pub use crate::sys::IPC_HANDLE_POLL_READY;
pub use crate::sys::IPC_HANDLE_POLL_SEND_UNBLOCKED;

pub use crate::sys::handle;
pub use crate::sys::handle_ref;

use crate::handle_set::handle_set_detach_ref;
use crate::sys::handle_ref_is_attached;
use crate::sys::list_node;

impl Default for list_node {
    fn default() -> Self {
        Self { prev: core::ptr::null_mut(), next: core::ptr::null_mut() }
    }
}

// nodes in a linked list refer to adjacent nodes by address and should be pinned
// TODO: add Unpin as a negative trait bound once the rustc feature is stabilized.
// impl !Unpin for list_node {}

impl Default for handle_ref {
    fn default() -> Self {
        Self {
            set_node: Default::default(),
            ready_node: Default::default(),
            uctx_node: Default::default(),
            waiter: Default::default(),
            parent: core::ptr::null_mut(),
            handle: core::ptr::null_mut(),
            id: 0,
            emask: 0,
            cookie: core::ptr::null_mut(),
        }
    }
}

// `handle_ref`s should not move since they are inserted as nodes in linked lists
// and the kernel may write back to the non-node fields as well.
// TODO: add Unpin as a negative trait bound once the rustc feature is stabilized.
// impl !Unpin for handle_ref {}

#[derive(Default)]
pub struct HandleRef {
    // Box the `handle_ref` so it doesn't get moved with the `HandleRef`
    inner: Box<handle_ref>,
}

impl HandleRef {
    pub fn is_attached(&self) -> bool {
        // SAFETY: `self.inner` was initialized, and `handle_ref_is_attached`
        // is otherwise safe to call no matter the state of the `handle_ref`.
        unsafe { handle_ref_is_attached(&*self.inner) }
    }

    pub fn detach(&mut self) {
        if self.is_attached() {
            // Safety: `inner` was initialized and attached to a handle set
            unsafe { handle_set_detach_ref(&mut *self.inner) }
        }
    }

    pub fn handle_close(&mut self) {
        if !self.inner.handle.is_null() {
            // Safety: `handle` is non-null so it wasn't closed
            unsafe { handle_close(self.inner.handle) };
            self.inner.handle = null_mut();
        }
    }

    pub fn handle_decref(&mut self) {
        if self.inner.handle.is_null() {
            panic!("handle is null; can't decrease its reference count");
        }

        // Safety: `handle` is non-null so it wasn't closed
        unsafe { handle_decref(self.inner.handle) };
    }

    pub fn as_ptr(&self) -> *const handle_ref {
        Box::as_ptr(&self.inner)
    }

    pub fn as_mut_ptr(&mut self) -> *mut handle_ref {
        Box::as_mut_ptr(&mut self.inner)
    }

    pub fn cookie(&self) -> *mut c_void {
        self.inner.cookie
    }

    pub fn set_cookie(&mut self, cookie: *mut c_void) {
        self.inner.cookie = cookie;
    }

    pub fn emask(&self) -> u32 {
        self.inner.emask
    }

    pub fn set_emask(&mut self, emask: u32) {
        self.inner.emask = emask;
    }

    pub fn handle(&mut self) -> *mut handle {
        self.inner.handle
    }

    pub fn id(&mut self) -> u32 {
        self.inner.id
    }

    pub fn set_id(&mut self, id: u32) {
        self.inner.id = id;
    }
}

impl Drop for HandleRef {
    fn drop(&mut self) {
        self.detach()
    }
}

// Safety: the kernel synchronizes operations on handle refs so they can be passed
// from one thread to another
unsafe impl Send for HandleRef {}

// Safety: the kernel synchronizes operations on handle refs so it is safe to share
// references between threads
unsafe impl Sync for HandleRef {}
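
// Illustrative sketch, not part of the original module: it exercises the safe
// accessors defined above to show where the raw `handle_ref` pointer handed to
// the kernel-facing APIs comes from. The id and event-mask values are arbitrary
// example values, and nothing here attaches the ref to a handle set or calls
// into the kernel wait APIs.
#[allow(dead_code)]
fn handle_ref_usage_sketch() {
    // A fresh `HandleRef` boxes a default `handle_ref`: null handle pointer,
    // zero id, empty event mask.
    let mut href = HandleRef::default();
    debug_assert!(href.handle().is_null());

    // The setters write straight through to the boxed `handle_ref` fields.
    href.set_id(7); // arbitrary example id
    href.set_emask(0x1); // arbitrary example event mask
    debug_assert_eq!(href.id(), 7);

    // `as_mut_ptr` exposes the stable address of the boxed `handle_ref`; this
    // is the pointer that kernel-facing code operates on, which is why the
    // `handle_ref` is boxed rather than stored inline.
    let _raw: *mut handle_ref = href.as_mut_ptr();

    // Dropping `href` runs `Drop::drop`, which detaches it from a handle set
    // if it was attached (it was not attached here).
}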