// SPDX-License-Identifier: GPL-2.0

//! Kernel types.

use crate::init::{self, PinInit};
use core::{
    cell::UnsafeCell,
    marker::{PhantomData, PhantomPinned},
    mem::{ManuallyDrop, MaybeUninit},
    ops::{Deref, DerefMut},
    ptr::NonNull,
};

/// Used to transfer ownership to and from foreign (non-Rust) languages.
///
/// Ownership is transferred from Rust to a foreign language by calling [`Self::into_foreign`] and
/// later may be transferred back to Rust by calling [`Self::from_foreign`].
///
/// This trait is meant to be used in cases when Rust objects are stored in C objects and
/// eventually "freed" back to Rust.
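///
/// # Examples
///
/// A minimal sketch of the intended round trip, assuming a smart pointer such as
/// [`KBox`](crate::alloc::KBox) implements this trait: ownership is handed to the foreign side
/// as a raw pointer (which would typically be stored in a C structure) and later reclaimed.
///
/// ```
/// use kernel::types::ForeignOwnable;
///
/// let value = KBox::new(42u32, GFP_KERNEL)?;
///
/// // Transfer ownership to the foreign side; only the raw pointer remains on the Rust side.
/// let ptr = value.into_foreign();
///
/// // ... the pointer would normally be stored in a C object at this point ...
///
/// // SAFETY: `ptr` came from `into_foreign` above and `from_foreign` has not been called on it
/// // yet.
/// let value = unsafe { KBox::<u32>::from_foreign(ptr) };
/// assert_eq!(*value, 42);
/// # Ok::<(), Error>(())
/// ```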
pub trait ForeignOwnable: Sized {
    /// Type of values borrowed between calls to [`ForeignOwnable::into_foreign`] and
    /// [`ForeignOwnable::from_foreign`].
    type Borrowed<'a>;

    /// Converts a Rust-owned object to a foreign-owned one.
    ///
    /// The foreign representation is a pointer to void. There are no guarantees for this pointer.
    /// For example, it might be invalid, dangling or pointing to uninitialized memory. Using it in
    /// any way except for [`ForeignOwnable::from_foreign`], [`ForeignOwnable::borrow`], or
    /// [`ForeignOwnable::try_from_foreign`] can result in undefined behavior.
    fn into_foreign(self) -> *const crate::ffi::c_void;

    /// Borrows a foreign-owned object.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by a previous call to [`ForeignOwnable::into_foreign`] for
    /// which a previous matching [`ForeignOwnable::from_foreign`] hasn't been called yet.
    unsafe fn borrow<'a>(ptr: *const crate::ffi::c_void) -> Self::Borrowed<'a>;

    /// Converts a foreign-owned object back to a Rust-owned one.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by a previous call to [`ForeignOwnable::into_foreign`] for
    /// which a previous matching [`ForeignOwnable::from_foreign`] hasn't been called yet.
    /// Additionally, all instances (if any) of values returned by [`ForeignOwnable::borrow`] for
    /// this object must have been dropped.
    unsafe fn from_foreign(ptr: *const crate::ffi::c_void) -> Self;

    /// Tries to convert a foreign-owned object back to a Rust-owned one.
    ///
    /// A convenience wrapper over [`ForeignOwnable::from_foreign`] that returns [`None`] if `ptr`
    /// is null.
    ///
    /// # Safety
    ///
    /// `ptr` must either be null or satisfy the safety requirements for
    /// [`ForeignOwnable::from_foreign`].
    unsafe fn try_from_foreign(ptr: *const crate::ffi::c_void) -> Option<Self> {
        if ptr.is_null() {
            None
        } else {
            // SAFETY: Since `ptr` is not null here, it satisfies the safety requirements of
            // `from_foreign` given the safety requirements of this function.
            unsafe { Some(Self::from_foreign(ptr)) }
        }
    }
}

impl ForeignOwnable for () {
    type Borrowed<'a> = ();

    fn into_foreign(self) -> *const crate::ffi::c_void {
        core::ptr::NonNull::dangling().as_ptr()
    }

    unsafe fn borrow<'a>(_: *const crate::ffi::c_void) -> Self::Borrowed<'a> {}

    unsafe fn from_foreign(_: *const crate::ffi::c_void) -> Self {}
}

/// Runs a cleanup function/closure when dropped.
///
/// The [`ScopeGuard::dismiss`] function prevents the cleanup function from running.
///
/// # Examples
///
/// In the example below, we have multiple exit paths and we want to log regardless of which one is
/// taken:
///
/// ```
/// # use kernel::types::ScopeGuard;
/// fn example1(arg: bool) {
///     let _log = ScopeGuard::new(|| pr_info!("example1 completed\n"));
///
///     if arg {
///         return;
///     }
///
///     pr_info!("Do something...\n");
/// }
///
/// # example1(false);
/// # example1(true);
/// ```
///
/// In the example below, we want to log the same message on all early exits but a different one on
/// the main exit path:
///
/// ```
/// # use kernel::types::ScopeGuard;
/// fn example2(arg: bool) {
///     let log = ScopeGuard::new(|| pr_info!("example2 returned early\n"));
///
///     if arg {
///         return;
///     }
///
///     // (Other early returns...)
///
///     log.dismiss();
///     pr_info!("example2 no early return\n");
/// }
///
/// # example2(false);
/// # example2(true);
/// ```
///
/// In the example below, we need a mutable object (the vector) to be accessible within the log
/// function, so we wrap it in the [`ScopeGuard`]:
///
/// ```
/// # use kernel::types::ScopeGuard;
/// fn example3(arg: bool) -> Result {
///     let mut vec =
///         ScopeGuard::new_with_data(KVec::new(), |v| pr_info!("vec had {} elements\n", v.len()));
///
///     vec.push(10u8, GFP_KERNEL)?;
///     if arg {
///         return Ok(());
///     }
///     vec.push(20u8, GFP_KERNEL)?;
///     Ok(())
/// }
///
/// # assert_eq!(example3(false), Ok(()));
/// # assert_eq!(example3(true), Ok(()));
/// ```
///
/// # Invariants
///
/// The value stored in the struct is nearly always `Some(_)`, except between
/// [`ScopeGuard::dismiss`] and [`ScopeGuard::drop`]: in this case, it will be `None` as the value
/// will have been returned to the caller. Since [`ScopeGuard::dismiss`] consumes the guard,
/// callers won't be able to use it anymore.
pub struct ScopeGuard<T, F: FnOnce(T)>(Option<(T, F)>);

impl<T, F: FnOnce(T)> ScopeGuard<T, F> {
    /// Creates a new guarded object wrapping the given data and with the given cleanup function.
    pub fn new_with_data(data: T, cleanup_func: F) -> Self {
        // INVARIANT: The struct is being initialised with `Some(_)`.
        Self(Some((data, cleanup_func)))
    }

    /// Prevents the cleanup function from running and returns the guarded data.
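    ///
    /// # Examples
    ///
    /// For instance, dismissing a guard created via [`ScopeGuard::new_with_data`] returns the
    /// wrapped value without running the cleanup closure:
    ///
    /// ```
    /// # use kernel::types::ScopeGuard;
    /// let guard = ScopeGuard::new_with_data(42u8, |v| pr_info!("cleanup with {}\n", v));
    ///
    /// // The cleanup closure does not run; the guarded value is handed back instead.
    /// assert_eq!(guard.dismiss(), 42);
    /// ```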
    pub fn dismiss(mut self) -> T {
        // INVARIANT: This is the exception case in the invariant; it is not visible to callers
        // because this function consumes `self`.
        self.0.take().unwrap().0
    }
}

impl ScopeGuard<(), fn(())> {
    /// Creates a new guarded object with the given cleanup function.
    pub fn new(cleanup: impl FnOnce()) -> ScopeGuard<(), impl FnOnce(())> {
        ScopeGuard::new_with_data((), move |()| cleanup())
    }
}

impl<T, F: FnOnce(T)> Deref for ScopeGuard<T, F> {
    type Target = T;

    fn deref(&self) -> &T {
        // The type invariants guarantee that `unwrap` will succeed.
        &self.0.as_ref().unwrap().0
    }
}

impl<T, F: FnOnce(T)> DerefMut for ScopeGuard<T, F> {
    fn deref_mut(&mut self) -> &mut T {
        // The type invariants guarantee that `unwrap` will succeed.
        &mut self.0.as_mut().unwrap().0
    }
}

impl<T, F: FnOnce(T)> Drop for ScopeGuard<T, F> {
    fn drop(&mut self) {
        // Run the cleanup function if one is still present.
        if let Some((data, cleanup)) = self.0.take() {
            cleanup(data)
        }
    }
}

/// Stores an opaque value.
///
/// `Opaque<T>` is meant to be used with FFI objects that are never interpreted by Rust code.
///
/// It is used to wrap structs from the C side, for example `Opaque<bindings::mutex>`.
/// It gets rid of all the usual assumptions that Rust has for a value:
///
/// * The value is allowed to be uninitialized (for example, have an invalid bit pattern: `3` for a
///   [`bool`]).
/// * The value is allowed to be mutated when a `&Opaque<T>` exists on the Rust side.
/// * No uniqueness for mutable references: it is fine to have multiple `&mut Opaque<T>` point to
///   the same value.
/// * The value is not allowed to be shared with other threads (i.e. it is `!Sync`).
///
/// This has to be used for all values that the C side has access to, because it can't be ensured
/// that the C side is adhering to the usual constraints that Rust needs.
///
/// Using `Opaque<T>` allows one to continue to use references on the Rust side even for values
/// shared with C.
///
/// # Examples
///
/// ```
/// # #![expect(unreachable_pub, clippy::disallowed_names)]
/// use kernel::types::Opaque;
/// # // Emulate a C struct binding which is from C, maybe uninitialized or not, only the C side
/// # // knows.
/// # mod bindings {
/// #     pub struct Foo {
/// #         pub val: u8,
/// #     }
/// # }
///
/// // `foo.val` is assumed to be handled on the C side, so we use `Opaque` to wrap it.
/// pub struct Foo {
///     foo: Opaque<bindings::Foo>,
/// }
///
/// impl Foo {
///     pub fn get_val(&self) -> u8 {
///         let ptr = Opaque::get(&self.foo);
///
///         // SAFETY: `Self` is valid from C side.
///         unsafe { (*ptr).val }
///     }
/// }
///
/// // Create an instance of `Foo` with the `Opaque` wrapper.
/// let foo = Foo {
///     foo: Opaque::new(bindings::Foo { val: 0xdb }),
/// };
///
/// assert_eq!(foo.get_val(), 0xdb);
/// ```
#[repr(transparent)]
pub struct Opaque<T> {
    value: UnsafeCell<MaybeUninit<T>>,
    _pin: PhantomPinned,
}

impl<T> Opaque<T> {
    /// Creates a new opaque value.
    pub const fn new(value: T) -> Self {
        Self {
            value: UnsafeCell::new(MaybeUninit::new(value)),
            _pin: PhantomPinned,
        }
    }

    /// Creates an uninitialised value.
    pub const fn uninit() -> Self {
        Self {
            value: UnsafeCell::new(MaybeUninit::uninit()),
            _pin: PhantomPinned,
        }
    }

    /// Creates a pin-initializer from the given initializer closure.
    ///
    /// The returned initializer calls the given closure with the pointer to the inner `T` of this
    /// `Opaque`. Since this memory is uninitialized, the closure is not allowed to read from it.
    ///
    /// This function is safe, because the `T` inside of an `Opaque` is allowed to be
    /// uninitialized. Additionally, access to the inner `T` requires `unsafe`, so the caller needs
    /// to verify at that point that the inner value is valid.
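    ///
    /// # Examples
    ///
    /// A minimal sketch: the closure writes the value directly from Rust, standing in for the C
    /// initialisation function that would normally be called here, and the resulting initializer
    /// is then materialised via `KBox::pin_init` (assumed available through the kernel prelude).
    ///
    /// ```
    /// # use kernel::types::Opaque;
    /// let init = Opaque::<u32>::ffi_init(|ptr| {
    ///     // SAFETY: `ptr` is valid for writes, and writing a `u32` fully initialises the value.
    ///     unsafe { ptr.write(42) };
    /// });
    ///
    /// let value = KBox::pin_init(init, GFP_KERNEL)?;
    /// // SAFETY: The closure above initialised the inner value.
    /// let v = unsafe { *value.get() };
    /// assert_eq!(v, 42);
    /// # Ok::<(), Error>(())
    /// ```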
    pub fn ffi_init(init_func: impl FnOnce(*mut T)) -> impl PinInit<Self> {
        // SAFETY: We contain a `MaybeUninit`, so it is OK for the `init_func` to not fully
        // initialize the `T`.
        unsafe {
            init::pin_init_from_closure::<_, ::core::convert::Infallible>(move |slot| {
                init_func(Self::raw_get(slot));
                Ok(())
            })
        }
    }

    /// Returns a raw pointer to the opaque data.
    pub const fn get(&self) -> *mut T {
        UnsafeCell::get(&self.value).cast::<T>()
    }

    /// Returns a raw pointer to the value behind `this`.
    ///
    /// This function is useful to get access to the value without creating intermediate
    /// references.
    pub const fn raw_get(this: *const Self) -> *mut T {
        UnsafeCell::raw_get(this.cast::<UnsafeCell<MaybeUninit<T>>>()).cast::<T>()
    }
}

/// Types that are _always_ reference counted.
///
/// It allows such types to define their own custom ref increment and decrement functions.
/// Additionally, it allows users to convert from a shared reference `&T` to an owned reference
/// [`ARef<T>`].
///
/// This is usually implemented by wrappers around existing structures on the C side of the code.
/// For Rust code, the recommendation is to use [`Arc`](crate::sync::Arc) to create
/// reference-counted instances of a type.
///
/// # Safety
///
/// Implementers must ensure that increments to the reference count keep the object alive in memory
/// at least until matching decrements are performed.
///
/// Implementers must also ensure that all instances are reference-counted. (Otherwise they
/// won't be able to honour the requirement that [`AlwaysRefCounted::inc_ref`] keep the object
/// alive.)
pub unsafe trait AlwaysRefCounted {
    /// Increments the reference count on the object.
    fn inc_ref(&self);

    /// Decrements the reference count on the object.
    ///
    /// Frees the object when the count reaches zero.
    ///
    /// # Safety
    ///
    /// Callers must ensure that there was a previous matching increment to the reference count,
    /// and that the object is no longer used after its reference count is decremented (as it may
    /// result in the object being freed), unless the caller owns another increment on the refcount
    /// (e.g., it calls [`AlwaysRefCounted::inc_ref`] twice, then calls
    /// [`AlwaysRefCounted::dec_ref`] once).
    unsafe fn dec_ref(obj: NonNull<Self>);
}

/// An owned reference to an always-reference-counted object.
///
/// The object's reference count is automatically decremented when an instance of [`ARef`] is
/// dropped. It is also automatically incremented when a new instance is created via
/// [`ARef::clone`].
///
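/// # Examples
///
/// The sketch below uses a Rust-side counter to make the reference counting visible; a real
/// implementer would forward [`AlwaysRefCounted::inc_ref`] and [`AlwaysRefCounted::dec_ref`] to
/// the refcounting helpers of the wrapped C object instead.
///
/// ```
/// use core::cell::Cell;
/// use core::ptr::NonNull;
/// use kernel::types::{ARef, AlwaysRefCounted};
///
/// struct Counted {
///     count: Cell<usize>,
/// }
///
/// // SAFETY: In this example the object lives on the stack for the whole test and outlives
/// // every `ARef` created below, so increments trivially keep it alive.
/// unsafe impl AlwaysRefCounted for Counted {
///     fn inc_ref(&self) {
///         self.count.set(self.count.get() + 1);
///     }
///
///     unsafe fn dec_ref(obj: NonNull<Self>) {
///         // SAFETY: The safety requirements of `dec_ref` guarantee that the object is still
///         // valid here.
///         let me = unsafe { obj.as_ref() };
///         me.count.set(me.count.get() - 1);
///     }
/// }
///
/// let obj = Counted { count: Cell::new(1) };
/// {
///     // Converting from `&Counted` increments the count; dropping the `ARef` decrements it.
///     let first: ARef<Counted> = ARef::from(&obj);
///     assert_eq!(obj.count.get(), 2);
///
///     let second = first.clone();
///     assert_eq!(obj.count.get(), 3);
///
///     drop(second);
///     assert_eq!(obj.count.get(), 2);
/// }
/// assert_eq!(obj.count.get(), 1);
/// ```
///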
/// # Invariants
///
/// The pointer stored in `ptr` is non-null and valid for the lifetime of the [`ARef`] instance. In
/// particular, the [`ARef`] instance owns an increment on the underlying object's reference count.
pub struct ARef<T: AlwaysRefCounted> {
    ptr: NonNull<T>,
    _p: PhantomData<T>,
}

// SAFETY: It is safe to send `ARef<T>` to another thread when the underlying `T` is `Sync` because
// it effectively means sharing `&T` (which is safe because `T` is `Sync`); additionally, it needs
// `T` to be `Send` because any thread that has an `ARef<T>` may ultimately access `T` using a
// mutable reference, for example, when the reference count reaches zero and `T` is dropped.
unsafe impl<T: AlwaysRefCounted + Sync + Send> Send for ARef<T> {}

// SAFETY: It is safe to send `&ARef<T>` to another thread when the underlying `T` is `Sync`
// because it effectively means sharing `&T` (which is safe because `T` is `Sync`); additionally,
// it needs `T` to be `Send` because any thread that has a `&ARef<T>` may clone it and get an
// `ARef<T>` on that thread, so the thread may ultimately access `T` using a mutable reference, for
// example, when the reference count reaches zero and `T` is dropped.
unsafe impl<T: AlwaysRefCounted + Sync + Send> Sync for ARef<T> {}

impl<T: AlwaysRefCounted> ARef<T> {
    /// Creates a new instance of [`ARef`].
    ///
    /// It takes over an increment of the reference count on the underlying object.
    ///
    /// # Safety
    ///
    /// Callers must ensure that the reference count was incremented at least once, and that they
    /// are properly relinquishing one increment. That is, if there is only one increment, callers
    /// must not use the underlying object anymore -- it is only safe to do so via the newly
    /// created [`ARef`].
    pub unsafe fn from_raw(ptr: NonNull<T>) -> Self {
        // INVARIANT: The safety requirements guarantee that the new instance now owns the
        // increment on the refcount.
        Self {
            ptr,
            _p: PhantomData,
        }
    }

    /// Consumes the `ARef`, returning a raw pointer.
    ///
    /// This function does not change the refcount. After calling this function, the caller is
    /// responsible for the refcount previously managed by the `ARef`.
    ///
    /// # Examples
    ///
    /// ```
    /// use core::ptr::NonNull;
    /// use kernel::types::{ARef, AlwaysRefCounted};
    ///
    /// struct Empty {}
    ///
    /// # // SAFETY: TODO.
    /// unsafe impl AlwaysRefCounted for Empty {
    ///     fn inc_ref(&self) {}
    ///     unsafe fn dec_ref(_obj: NonNull<Self>) {}
    /// }
    ///
    /// let mut data = Empty {};
    /// let ptr = NonNull::<Empty>::new(&mut data as *mut _).unwrap();
    /// # // SAFETY: TODO.
    /// let data_ref: ARef<Empty> = unsafe { ARef::from_raw(ptr) };
    /// let raw_ptr: NonNull<Empty> = ARef::into_raw(data_ref);
    ///
    /// assert_eq!(ptr, raw_ptr);
    /// ```
    pub fn into_raw(me: Self) -> NonNull<T> {
        ManuallyDrop::new(me).ptr
    }
}

impl<T: AlwaysRefCounted> Clone for ARef<T> {
    fn clone(&self) -> Self {
        self.inc_ref();
        // SAFETY: We just incremented the refcount above.
        unsafe { Self::from_raw(self.ptr) }
    }
}

impl<T: AlwaysRefCounted> Deref for ARef<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // SAFETY: The type invariants guarantee that the object is valid.
        unsafe { self.ptr.as_ref() }
    }
}

impl<T: AlwaysRefCounted> From<&T> for ARef<T> {
    fn from(b: &T) -> Self {
        b.inc_ref();
        // SAFETY: We just incremented the refcount above.
        unsafe { Self::from_raw(NonNull::from(b)) }
    }
}

impl<T: AlwaysRefCounted> Drop for ARef<T> {
    fn drop(&mut self) {
        // SAFETY: The type invariants guarantee that the `ARef` owns the reference we're about to
        // decrement.
        unsafe { T::dec_ref(self.ptr) };
    }
}

/// A sum type that always holds either a value of type `L` or `R`.
///
/// # Examples
///
/// ```
/// use kernel::types::Either;
///
/// let left_value: Either<i32, &str> = Either::Left(7);
/// let right_value: Either<i32, &str> = Either::Right("right value");
/// ```
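///
/// A value is then typically consumed by matching on the two variants; `describe` below is just
/// an illustrative helper:
///
/// ```
/// use kernel::types::Either;
///
/// fn describe(value: Either<i32, &str>) -> &'static str {
///     match value {
///         Either::Left(_) => "integer",
///         Either::Right(_) => "string",
///     }
/// }
///
/// assert_eq!(describe(Either::Left(7)), "integer");
/// assert_eq!(describe(Either::Right("hello")), "string");
/// ```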
pub enum Either<L, R> {
    /// Constructs an instance of [`Either`] containing a value of type `L`.
    Left(L),

    /// Constructs an instance of [`Either`] containing a value of type `R`.
    Right(R),
}