Diffstat (limited to 'rust/kernel/sync')
-rw-r--r--  rust/kernel/sync/arc.rs            | 81
-rw-r--r--  rust/kernel/sync/condvar.rs        |  6
-rw-r--r--  rust/kernel/sync/lock.rs           |  7
-rw-r--r--  rust/kernel/sync/lock/mutex.rs     |  2
-rw-r--r--  rust/kernel/sync/lock/spinlock.rs  |  2
-rw-r--r--  rust/kernel/sync/poll.rs           |  4
6 files changed, 85 insertions, 17 deletions
diff --git a/rust/kernel/sync/arc.rs b/rust/kernel/sync/arc.rs
index 3cefda7a4372..8484c814609a 100644
--- a/rust/kernel/sync/arc.rs
+++ b/rust/kernel/sync/arc.rs
@@ -19,7 +19,7 @@
 use crate::{
     alloc::{AllocError, Flags, KBox},
     bindings,
-    init::{self, InPlaceInit, Init, PinInit},
+    init::InPlaceInit,
     try_init,
     types::{ForeignOwnable, Opaque},
 };
@@ -32,7 +32,7 @@ use core::{
     pin::Pin,
     ptr::NonNull,
 };
-use macros::pin_data;
+use pin_init::{self, pin_data, InPlaceWrite, Init, PinInit};
 
 mod std_vendor;
 
@@ -202,6 +202,26 @@ unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
 // the reference count reaches zero and `T` is dropped.
 unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
 
+impl<T> InPlaceInit<T> for Arc<T> {
+    type PinnedSelf = Self;
+
+    #[inline]
+    fn try_pin_init<E>(init: impl PinInit<T, E>, flags: Flags) -> Result<Self::PinnedSelf, E>
+    where
+        E: From<AllocError>,
+    {
+        UniqueArc::try_pin_init(init, flags).map(|u| u.into())
+    }
+
+    #[inline]
+    fn try_init<E>(init: impl Init<T, E>, flags: Flags) -> Result<Self, E>
+    where
+        E: From<AllocError>,
+    {
+        UniqueArc::try_init(init, flags).map(|u| u.into())
+    }
+}
+
 impl<T> Arc<T> {
     /// Constructs a new reference counted instance of `T`.
     pub fn new(contents: T, flags: Flags) -> Result<Self, AllocError> {
@@ -246,6 +266,15 @@ impl<T: ?Sized> Arc<T> {
         unsafe { core::ptr::addr_of!((*ptr).data) }
     }
 
+    /// Return a raw pointer to the data in this arc.
+    pub fn as_ptr(this: &Self) -> *const T {
+        let ptr = this.ptr.as_ptr();
+
+        // SAFETY: As `ptr` points to a valid allocation of type `ArcInner`,
+        // field projection to `data`is within bounds of the allocation.
+        unsafe { core::ptr::addr_of!((*ptr).data) }
+    }
+
     /// Recreates an [`Arc`] instance previously deconstructed via [`Arc::into_raw`].
     ///
     /// # Safety
@@ -539,11 +568,11 @@ impl<T: ?Sized> ArcBorrow<'_, T> {
     }
 
     /// Creates an [`ArcBorrow`] to an [`Arc`] that has previously been deconstructed with
-    /// [`Arc::into_raw`].
+    /// [`Arc::into_raw`] or [`Arc::as_ptr`].
     ///
     /// # Safety
     ///
-    /// * The provided pointer must originate from a call to [`Arc::into_raw`].
+    /// * The provided pointer must originate from a call to [`Arc::into_raw`] or [`Arc::as_ptr`].
     /// * For the duration of the lifetime annotated on this `ArcBorrow`, the reference count must
     ///   not hit zero.
     /// * For the duration of the lifetime annotated on this `ArcBorrow`, there must not be a
@@ -659,6 +688,48 @@ pub struct UniqueArc<T: ?Sized> {
     inner: Arc<T>,
 }
 
+impl<T> InPlaceInit<T> for UniqueArc<T> {
+    type PinnedSelf = Pin<Self>;
+
+    #[inline]
+    fn try_pin_init<E>(init: impl PinInit<T, E>, flags: Flags) -> Result<Self::PinnedSelf, E>
+    where
+        E: From<AllocError>,
+    {
+        UniqueArc::new_uninit(flags)?.write_pin_init(init)
+    }
+
+    #[inline]
+    fn try_init<E>(init: impl Init<T, E>, flags: Flags) -> Result<Self, E>
+    where
+        E: From<AllocError>,
+    {
+        UniqueArc::new_uninit(flags)?.write_init(init)
+    }
+}
+
+impl<T> InPlaceWrite<T> for UniqueArc<MaybeUninit<T>> {
+    type Initialized = UniqueArc<T>;
+
+    fn write_init<E>(mut self, init: impl Init<T, E>) -> Result<Self::Initialized, E> {
+        let slot = self.as_mut_ptr();
+        // SAFETY: When init errors/panics, slot will get deallocated but not dropped,
+        // slot is valid.
+        unsafe { init.__init(slot)? };
+        // SAFETY: All fields have been initialized.
+        Ok(unsafe { self.assume_init() })
+    }
+
+    fn write_pin_init<E>(mut self, init: impl PinInit<T, E>) -> Result<Pin<Self::Initialized>, E> {
+        let slot = self.as_mut_ptr();
+        // SAFETY: When init errors/panics, slot will get deallocated but not dropped,
+        // slot is valid and will not be moved, because we pin it later.
+        unsafe { init.__pinned_init(slot)? };
+        // SAFETY: All fields have been initialized.
+        Ok(unsafe { self.assume_init() }.into())
+    }
+}
+
 impl<T> UniqueArc<T> {
     /// Tries to allocate a new [`UniqueArc`] instance.
     pub fn new(value: T, flags: Flags) -> Result<Self, AllocError> {
@@ -675,7 +746,7 @@ impl<T> UniqueArc<T> {
             try_init!(ArcInner {
                 // SAFETY: There are no safety requirements for this FFI call.
                 refcount: Opaque::new(unsafe { bindings::REFCOUNT_INIT(1) }),
-                data <- init::uninit::<T, AllocError>(),
+                data <- pin_init::uninit::<T, AllocError>(),
             }? AllocError),
             flags,
         )?;
diff --git a/rust/kernel/sync/condvar.rs b/rust/kernel/sync/condvar.rs
index fbf68ada582f..caebf03f553b 100644
--- a/rust/kernel/sync/condvar.rs
+++ b/rust/kernel/sync/condvar.rs
@@ -8,8 +8,6 @@
 use super::{lock::Backend, lock::Guard, LockClassKey};
 use crate::{
     ffi::{c_int, c_long},
-    init::PinInit,
-    pin_init,
     str::CStr,
     task::{
         MAX_SCHEDULE_TIMEOUT, TASK_FREEZABLE, TASK_INTERRUPTIBLE, TASK_NORMAL, TASK_UNINTERRUPTIBLE,
@@ -18,7 +16,7 @@ use crate::{
     types::Opaque,
 };
 use core::{marker::PhantomPinned, pin::Pin, ptr};
-use macros::pin_data;
+use pin_init::{pin_data, pin_init, PinInit};
 
 /// Creates a [`CondVar`] initialiser with the given name and a newly-created lock class.
 #[macro_export]
@@ -38,7 +36,7 @@ pub use new_condvar;
 /// spuriously.
 ///
 /// Instances of [`CondVar`] need a lock class and to be pinned. The recommended way to create such
-/// instances is with the [`pin_init`](crate::pin_init) and [`new_condvar`] macros.
+/// instances is with the [`pin_init`](crate::pin_init!) and [`new_condvar`] macros.
 ///
 /// # Examples
 ///
diff --git a/rust/kernel/sync/lock.rs b/rust/kernel/sync/lock.rs
index 360a10a9216d..e82fa5be289c 100644
--- a/rust/kernel/sync/lock.rs
+++ b/rust/kernel/sync/lock.rs
@@ -7,13 +7,11 @@
 
 use super::LockClassKey;
 use crate::{
-    init::PinInit,
-    pin_init,
     str::CStr,
     types::{NotThreadSafe, Opaque, ScopeGuard},
 };
 use core::{cell::UnsafeCell, marker::PhantomPinned, pin::Pin};
-use macros::pin_data;
+use pin_init::{pin_data, pin_init, PinInit};
 
 pub mod mutex;
 pub mod spinlock;
@@ -208,7 +206,8 @@ impl<'a, T: ?Sized, B: Backend> Guard<'a, T, B> {
     /// lock is held.
     ///
     /// ```
-    /// # use kernel::{new_spinlock, stack_pin_init, sync::lock::{Backend, Guard, Lock}};
+    /// # use kernel::{new_spinlock, sync::lock::{Backend, Guard, Lock}};
+    /// # use pin_init::stack_pin_init;
     ///
     /// fn assert_held<T, B: Backend>(guard: &Guard<'_, T, B>, lock: &Lock<T, B>) {
     ///     // Address-equal means the same lock.
diff --git a/rust/kernel/sync/lock/mutex.rs b/rust/kernel/sync/lock/mutex.rs
index 70cadbc2e8e2..581cee7ab842 100644
--- a/rust/kernel/sync/lock/mutex.rs
+++ b/rust/kernel/sync/lock/mutex.rs
@@ -26,7 +26,7 @@ pub use new_mutex;
 /// Since it may block, [`Mutex`] needs to be used with care in atomic contexts.
 ///
 /// Instances of [`Mutex`] need a lock class and to be pinned. The recommended way to create such
-/// instances is with the [`pin_init`](crate::pin_init) and [`new_mutex`] macros.
+/// instances is with the [`pin_init`](pin_init::pin_init) and [`new_mutex`] macros.
 ///
 /// # Examples
 ///
diff --git a/rust/kernel/sync/lock/spinlock.rs b/rust/kernel/sync/lock/spinlock.rs
index ab2f8d075311..d7be38ccbdc7 100644
--- a/rust/kernel/sync/lock/spinlock.rs
+++ b/rust/kernel/sync/lock/spinlock.rs
@@ -24,7 +24,7 @@ pub use new_spinlock;
 /// unlocked, at which point another CPU will be allowed to make progress.
 ///
 /// Instances of [`SpinLock`] need a lock class and to be pinned. The recommended way to create such
-/// instances is with the [`pin_init`](crate::pin_init) and [`new_spinlock`] macros.
+/// instances is with the [`pin_init`](pin_init::pin_init) and [`new_spinlock`] macros.
 ///
 /// # Examples
 ///
diff --git a/rust/kernel/sync/poll.rs b/rust/kernel/sync/poll.rs
index c4934f82d68b..d7e6e59e124b 100644
--- a/rust/kernel/sync/poll.rs
+++ b/rust/kernel/sync/poll.rs
@@ -43,11 +43,11 @@ impl PollTable {
     ///
     /// # Safety
     ///
-    /// The caller must ensure that for the duration of 'a, the pointer will point at a valid poll
+    /// The caller must ensure that for the duration of `'a`, the pointer will point at a valid poll
     /// table (as defined in the type invariants).
     ///
    /// The caller must also ensure that the `poll_table` is only accessed via the returned
-    /// reference for the duration of 'a.
+    /// reference for the duration of `'a`.
     pub unsafe fn from_ptr<'a>(ptr: *mut bindings::poll_table) -> &'a mut PollTable {
         // SAFETY: The safety requirements guarantee the validity of the dereference, while the
         // `PollTable` type being transparent makes the cast ok.
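The sketch below is not part of the patch; it illustrates how the new `InPlaceInit` implementation for `Arc<T>` and the `Arc::as_ptr`/`ArcBorrow::from_raw` pairing might be used from kernel Rust code, assuming the in-tree `pin_init` crate and the usual `kernel` crate paths (`kernel::init::InPlaceInit`, `kernel::alloc::flags::GFP_KERNEL`, the `new_mutex!` macro). The `Config` type and the `make_config`/`peek` functions are hypothetical.

use kernel::alloc::flags::GFP_KERNEL;
use kernel::init::InPlaceInit;
use kernel::prelude::*;
use kernel::sync::{Arc, ArcBorrow, Mutex};
use pin_init::{pin_data, pin_init};

/// Hypothetical pinned state protected by a kernel mutex.
#[pin_data]
struct Config {
    #[pin]
    state: Mutex<u32>,
}

/// Initialises `Config` directly in the `Arc` allocation via the
/// `InPlaceInit` impl this patch adds for `Arc<T>` (which forwards to
/// `UniqueArc` and converts the result into an `Arc` on success).
fn make_config() -> Result<Arc<Config>> {
    Arc::pin_init(
        pin_init!(Config {
            state <- kernel::new_mutex!(0),
        }),
        GFP_KERNEL,
    )
}

/// Reads the value through an `ArcBorrow` obtained from the new `Arc::as_ptr`.
/// (`Arc` already derefs to `Config`; this only illustrates the
/// `as_ptr`/`from_raw` pairing documented by the patch.)
fn peek(cfg: &Arc<Config>) -> u32 {
    let ptr = Arc::as_ptr(cfg);
    // SAFETY: `ptr` was returned by `Arc::as_ptr`, `cfg` keeps the reference
    // count non-zero for the duration of this borrow, and no `UniqueArc` to
    // the same allocation exists.
    let borrow: ArcBorrow<'_, Config> = unsafe { ArcBorrow::from_raw(ptr) };
    *borrow.state.lock()
}

Keeping the raw pointer round trip behind `Arc::as_ptr` (rather than `Arc::into_raw`) avoids consuming the reference, which is the use case the updated `ArcBorrow::from_raw` documentation describes.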