From 4f9fe856a25ab29345b90e7725509e9ee38a37be Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 17 Apr 2024 14:19:41 +0200
Subject: Adding upstream version 1.69.0+dfsg1.

Signed-off-by: Daniel Baumann
---
 library/core/src/sync/atomic.rs | 196 +++++++++++++++++++++++++++++++++++-----
 1 file changed, 173 insertions(+), 23 deletions(-)

diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index 14367eb09..040a59184 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -305,6 +305,50 @@ impl AtomicBool {
         AtomicBool { v: UnsafeCell::new(v as u8) }
     }
 
+    /// Creates a new `AtomicBool` from a pointer.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(atomic_from_ptr, pointer_is_aligned)]
+    /// use std::sync::atomic::{self, AtomicBool};
+    /// use std::mem::align_of;
+    ///
+    /// // Get a pointer to an allocated value
+    /// let ptr: *mut bool = Box::into_raw(Box::new(false));
+    ///
+    /// assert!(ptr.is_aligned_to(align_of::<AtomicBool>()));
+    ///
+    /// {
+    ///     // Create an atomic view of the allocated value
+    ///     let atomic = unsafe { AtomicBool::from_ptr(ptr) };
+    ///
+    ///     // Use `atomic` for atomic operations, possibly share it with other threads
+    ///     atomic.store(true, atomic::Ordering::Relaxed);
+    /// }
+    ///
+    /// // It's ok to non-atomically access the value behind `ptr`,
+    /// // since the reference to the atomic ended its lifetime in the block above
+    /// assert_eq!(unsafe { *ptr }, true);
+    ///
+    /// // Deallocate the value
+    /// unsafe { drop(Box::from_raw(ptr)) }
+    /// ```
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must be aligned to `align_of::<AtomicBool>()` (note that on some platforms this can be bigger than `align_of::<bool>()`).
+    /// * `ptr` must be [valid] for both reads and writes for the whole lifetime `'a`.
+    /// * The value behind `ptr` must not be accessed through non-atomic operations for the whole lifetime `'a`.
+    ///
+    /// [valid]: crate::ptr#safety
+    #[unstable(feature = "atomic_from_ptr", issue = "108652")]
+    #[rustc_const_unstable(feature = "atomic_from_ptr", issue = "108652")]
+    pub const unsafe fn from_ptr<'a>(ptr: *mut bool) -> &'a AtomicBool {
+        // SAFETY: guaranteed by the caller
+        unsafe { &*ptr.cast() }
+    }
+
     /// Returns a mutable reference to the underlying [`bool`].
     ///
     /// This is safe because the mutable reference guarantees that no other threads are
@@ -922,14 +966,14 @@ impl AtomicBool {
     ///
     /// let mut atomic = AtomicBool::new(true);
     /// unsafe {
-    ///     my_atomic_op(atomic.as_mut_ptr());
+    ///     my_atomic_op(atomic.as_ptr());
     /// }
     /// # }
     /// ```
     #[inline]
     #[unstable(feature = "atomic_mut_ptr", reason = "recently added", issue = "66893")]
-    pub fn as_mut_ptr(&self) -> *mut bool {
-        self.v.get() as *mut bool
+    pub const fn as_ptr(&self) -> *mut bool {
+        self.v.get().cast()
     }
 
     /// Fetches the value, and applies a function to it that returns an optional
@@ -1017,6 +1061,50 @@ impl<T> AtomicPtr<T> {
         AtomicPtr { p: UnsafeCell::new(p) }
     }
 
+    /// Creates a new `AtomicPtr` from a pointer.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(atomic_from_ptr, pointer_is_aligned)]
+    /// use std::sync::atomic::{self, AtomicPtr};
+    /// use std::mem::align_of;
+    ///
+    /// // Get a pointer to an allocated value
+    /// let ptr: *mut *mut u8 = Box::into_raw(Box::new(std::ptr::null_mut()));
+    ///
+    /// assert!(ptr.is_aligned_to(align_of::<AtomicPtr<u8>>()));
+    ///
+    /// {
+    ///     // Create an atomic view of the allocated value
+    ///     let atomic = unsafe { AtomicPtr::from_ptr(ptr) };
+    ///
+    ///     // Use `atomic` for atomic operations, possibly share it with other threads
+    ///     atomic.store(std::ptr::NonNull::dangling().as_ptr(), atomic::Ordering::Relaxed);
+    /// }
+    ///
+    /// // It's ok to non-atomically access the value behind `ptr`,
+    /// // since the reference to the atomic ended its lifetime in the block above
+    /// assert!(!unsafe { *ptr }.is_null());
+    ///
+    /// // Deallocate the value
+    /// unsafe { drop(Box::from_raw(ptr)) }
+    /// ```
+    ///
+    /// # Safety
+    ///
+    /// * `ptr` must be aligned to `align_of::<AtomicPtr<T>>()` (note that on some platforms this can be bigger than `align_of::<*mut T>()`).
+    /// * `ptr` must be [valid] for both reads and writes for the whole lifetime `'a`.
+    /// * The value behind `ptr` must not be accessed through non-atomic operations for the whole lifetime `'a`.
+    ///
+    /// [valid]: crate::ptr#safety
+    #[unstable(feature = "atomic_from_ptr", issue = "108652")]
+    #[rustc_const_unstable(feature = "atomic_from_ptr", issue = "108652")]
+    pub const unsafe fn from_ptr<'a>(ptr: *mut *mut T) -> &'a AtomicPtr<T> {
+        // SAFETY: guaranteed by the caller
+        unsafe { &*ptr.cast() }
+    }
+
     /// Returns a mutable reference to the underlying pointer.
     ///
     /// This is safe because the mutable reference guarantees that no other threads are
@@ -1803,7 +1891,7 @@ impl<T> AtomicPtr<T> {
     ///
     /// ```ignore (extern-declaration)
     /// #![feature(atomic_mut_ptr)]
-    //// use std::sync::atomic::AtomicPtr;
+    /// use std::sync::atomic::AtomicPtr;
     ///
     /// extern "C" {
     ///     fn my_atomic_op(arg: *mut *mut u32);
@@ -1814,12 +1902,12 @@ impl<T> AtomicPtr<T> {
     ///
     /// // SAFETY: Safe as long as `my_atomic_op` is atomic.
     /// unsafe {
-    ///     my_atomic_op(atomic.as_mut_ptr());
+    ///     my_atomic_op(atomic.as_ptr());
    /// }
     /// ```
     #[inline]
     #[unstable(feature = "atomic_mut_ptr", reason = "recently added", issue = "66893")]
-    pub fn as_mut_ptr(&self) -> *mut *mut T {
+    pub const fn as_ptr(&self) -> *mut *mut T {
         self.p.get()
     }
 }
@@ -1861,7 +1949,8 @@ macro_rules! if_not_8_bit {
     ($_:ident, $($tt:tt)*) => { $($tt)* };
 }
 
-#[cfg(target_has_atomic_load_store = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic_load_store))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic_load_store = "8"))]
 macro_rules! atomic_int {
     ($cfg_cas:meta,
      $cfg_align:meta,
@@ -1957,6 +2046,53 @@ macro_rules! atomic_int {
             Self {v: UnsafeCell::new(v)}
         }
 
+        /// Creates a new reference to an atomic integer from a pointer.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// #![feature(atomic_from_ptr, pointer_is_aligned)]
+        #[doc = concat!($extra_feature, "use std::sync::atomic::{self, ", stringify!($atomic_type), "};")]
+        /// use std::mem::align_of;
+        ///
+        /// // Get a pointer to an allocated value
+        #[doc = concat!("let ptr: *mut ", stringify!($int_type), " = Box::into_raw(Box::new(0));")]
+        ///
+        #[doc = concat!("assert!(ptr.is_aligned_to(align_of::<", stringify!($atomic_type), ">()));")]
+        ///
+        /// {
+        ///     // Create an atomic view of the allocated value
+        // SAFETY: this is a doc comment, tidy, it can't hurt you (also guaranteed by the construction of `ptr` and the assert above)
+        #[doc = concat!("    let atomic = unsafe {", stringify!($atomic_type), "::from_ptr(ptr) };")]
+        ///
+        ///     // Use `atomic` for atomic operations, possibly share it with other threads
+        ///     atomic.store(1, atomic::Ordering::Relaxed);
+        /// }
+        ///
+        /// // It's ok to non-atomically access the value behind `ptr`,
+        /// // since the reference to the atomic ended its lifetime in the block above
+        /// assert_eq!(unsafe { *ptr }, 1);
+        ///
+        /// // Deallocate the value
+        /// unsafe { drop(Box::from_raw(ptr)) }
+        /// ```
+        ///
+        /// # Safety
+        ///
+        /// * `ptr` must be aligned to `align_of::<AtomicBool>()` (note that on some platforms this can be bigger than `align_of::<bool>()`).
+        #[doc = concat!(" * `ptr` must be aligned to `align_of::<", stringify!($atomic_type), ">()` (note that on some platforms this can be bigger than `align_of::<", stringify!($int_type), ">()`).")]
+        /// * `ptr` must be [valid] for both reads and writes for the whole lifetime `'a`.
+        /// * The value behind `ptr` must not be accessed through non-atomic operations for the whole lifetime `'a`.
+        ///
+        /// [valid]: crate::ptr#safety
+        #[unstable(feature = "atomic_from_ptr", issue = "108652")]
+        #[rustc_const_unstable(feature = "atomic_from_ptr", issue = "108652")]
+        pub const unsafe fn from_ptr<'a>(ptr: *mut $int_type) -> &'a $atomic_type {
+            // SAFETY: guaranteed by the caller
+            unsafe { &*ptr.cast() }
+        }
+
+
         /// Returns a mutable reference to the underlying integer.
         ///
         /// This is safe because the mutable reference guarantees that no other threads are
@@ -2718,7 +2854,7 @@ macro_rules! atomic_int {
         ///
         /// // SAFETY: Safe as long as `my_atomic_op` is atomic.
        /// unsafe {
-        ///     my_atomic_op(atomic.as_mut_ptr());
+        ///     my_atomic_op(atomic.as_ptr());
         /// }
         /// # }
         /// ```
@@ -2726,7 +2862,7 @@
         #[inline]
         #[unstable(feature = "atomic_mut_ptr",
                    reason = "recently added", issue = "66893")]
-        pub fn as_mut_ptr(&self) -> *mut $int_type {
+        pub const fn as_ptr(&self) -> *mut $int_type {
             self.v.get()
         }
     }
@@ -2988,7 +3124,8 @@ atomic_int_ptr_sized! {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 fn strongest_failure_ordering(order: Ordering) -> Ordering {
     match order {
         Release => Relaxed,
@@ -3030,7 +3167,8 @@ unsafe fn atomic_load<T: Copy>(dst: *const T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_swap<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_swap`.
@@ -3047,7 +3185,8 @@ unsafe fn atomic_swap<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// Returns the previous value (like __sync_fetch_and_add).
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_add<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_add`.
@@ -3064,7 +3203,8 @@ unsafe fn atomic_add<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// Returns the previous value (like __sync_fetch_and_sub).
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_sub<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_sub`.
@@ -3080,7 +3220,8 @@ unsafe fn atomic_sub<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_compare_exchange<T: Copy>(
     dst: *mut T,
@@ -3115,7 +3256,8 @@ unsafe fn atomic_compare_exchange<T: Copy>(
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_compare_exchange_weak<T: Copy>(
     dst: *mut T,
@@ -3150,7 +3292,8 @@ unsafe fn atomic_compare_exchange_weak<T: Copy>(
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_and<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_and`
@@ -3166,7 +3309,8 @@ unsafe fn atomic_and<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_nand<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_nand`
@@ -3182,7 +3326,8 @@ unsafe fn atomic_nand<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_or<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_or`
@@ -3198,7 +3343,8 @@ unsafe fn atomic_or<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_xor<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_xor`
@@ -3215,7 +3361,8 @@ unsafe fn atomic_xor<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the max value (signed comparison)
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_max<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_max`
@@ -3232,7 +3379,8 @@ unsafe fn atomic_max<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the min value (signed comparison)
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_min<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_min`
@@ -3249,7 +3397,8 @@ unsafe fn atomic_min<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the max value (unsigned comparison)
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_umax<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_umax`
@@ -3266,7 +3415,8 @@ unsafe fn atomic_umax<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// returns the min value (unsigned comparison)
 #[inline]
-#[cfg(target_has_atomic = "8")]
+#[cfg_attr(not(bootstrap), cfg(target_has_atomic))]
+#[cfg_attr(bootstrap, cfg(target_has_atomic = "8"))]
 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
 unsafe fn atomic_umin<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
     // SAFETY: the caller must uphold the safety contract for `atomic_umin`
--
cgit v1.2.3
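The `from_ptr` constructors added by this patch are used the same way for every atomic type. A minimal sketch using `AtomicU32`, adapted from the doctests in the hunks above (it needs a nightly toolchain of this vintage, since `atomic_from_ptr` and `pointer_is_aligned` are unstable features here):

```rust
#![feature(atomic_from_ptr, pointer_is_aligned)]
use std::mem::align_of;
use std::sync::atomic::{AtomicU32, Ordering};

fn main() {
    // Allocate a plain integer and keep only a raw pointer to it.
    let ptr: *mut u32 = Box::into_raw(Box::new(0));
    assert!(ptr.is_aligned_to(align_of::<AtomicU32>()));

    {
        // View the allocation as an atomic for the duration of this block.
        let atomic = unsafe { AtomicU32::from_ptr(ptr) };
        atomic.store(1, Ordering::Relaxed);
    }

    // The atomic reference has gone out of scope, so non-atomic access is allowed again.
    assert_eq!(unsafe { *ptr }, 1);

    // Hand the allocation back to the Box so it is freed.
    unsafe { drop(Box::from_raw(ptr)) }
}
```

Scoping the atomic view to a block mirrors the lifetime rule spelled out in the `# Safety` sections: the value behind `ptr` must not be touched non-atomically while the `&'a` reference returned by `from_ptr` is live.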