Diffstat (limited to 'library/core/src/ptr')
-rw-r--r--  library/core/src/ptr/alignment.rs    12
-rw-r--r--  library/core/src/ptr/const_ptr.rs   110
-rw-r--r--  library/core/src/ptr/metadata.rs      2
-rw-r--r--  library/core/src/ptr/mod.rs          54
-rw-r--r--  library/core/src/ptr/mut_ptr.rs     110
-rw-r--r--  library/core/src/ptr/non_null.rs     17
6 files changed, 141 insertions, 164 deletions
diff --git a/library/core/src/ptr/alignment.rs b/library/core/src/ptr/alignment.rs
index 64a5290c3..2123147c7 100644
--- a/library/core/src/ptr/alignment.rs
+++ b/library/core/src/ptr/alignment.rs
@@ -10,8 +10,7 @@ use crate::{cmp, fmt, hash, mem, num};
/// are likely not to be supported by actual allocators and linkers.
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
#[derive(Copy, Clone, Eq)]
-#[cfg_attr(bootstrap, derive(PartialEq))]
-#[cfg_attr(not(bootstrap), derive_const(PartialEq))]
+#[derive_const(PartialEq)]
#[repr(transparent)]
pub struct Alignment(AlignmentEnum);
@@ -203,8 +202,7 @@ type AlignmentEnum = AlignmentEnum32;
type AlignmentEnum = AlignmentEnum64;
#[derive(Copy, Clone, Eq)]
-#[cfg_attr(bootstrap, derive(PartialEq))]
-#[cfg_attr(not(bootstrap), derive_const(PartialEq))]
+#[derive_const(PartialEq)]
#[repr(u16)]
enum AlignmentEnum16 {
_Align1Shl0 = 1 << 0,
@@ -226,8 +224,7 @@ enum AlignmentEnum16 {
}
#[derive(Copy, Clone, Eq)]
-#[cfg_attr(bootstrap, derive(PartialEq))]
-#[cfg_attr(not(bootstrap), derive_const(PartialEq))]
+#[derive_const(PartialEq)]
#[repr(u32)]
enum AlignmentEnum32 {
_Align1Shl0 = 1 << 0,
@@ -265,8 +262,7 @@ enum AlignmentEnum32 {
}
#[derive(Copy, Clone, Eq)]
-#[cfg_attr(bootstrap, derive(PartialEq))]
-#[cfg_attr(not(bootstrap), derive_const(PartialEq))]
+#[derive_const(PartialEq)]
#[repr(u64)]
enum AlignmentEnum64 {
_Align1Shl0 = 1 << 0,
diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index d34813599..7b1cb5488 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -1,6 +1,6 @@
use super::*;
use crate::cmp::Ordering::{self, Equal, Greater, Less};
-use crate::intrinsics;
+use crate::intrinsics::{self, const_eval_select};
use crate::mem;
use crate::slice::{self, SliceIndex};
@@ -34,12 +34,23 @@ impl<T: ?Sized> *const T {
#[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
#[inline]
pub const fn is_null(self) -> bool {
- // Compare via a cast to a thin pointer, so fat pointers are only
- // considering their "data" part for null-ness.
- match (self as *const u8).guaranteed_eq(null()) {
- None => false,
- Some(res) => res,
+ #[inline]
+ fn runtime_impl(ptr: *const u8) -> bool {
+ ptr.addr() == 0
}
+
+ #[inline]
+ const fn const_impl(ptr: *const u8) -> bool {
+ // Compare via a cast to a thin pointer, so fat pointers are only
+ // considering their "data" part for null-ness.
+ match (ptr).guaranteed_eq(null_mut()) {
+ None => false,
+ Some(res) => res,
+ }
+ }
+
+ // SAFETY: The two versions are equivalent at runtime.
+ unsafe { const_eval_select((self as *const u8,), const_impl, runtime_impl) }
}
/// Casts to a pointer of another type.
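[Editor's note] The hunk above switches `is_null` to the `const_eval_select` pattern: a `const fn` body for compile-time evaluation and a plain `fn` for runtime, which must be observably equivalent. A minimal sketch of that pattern, assuming a nightly toolchain; the exact feature gates and signature of the intrinsic have shifted between nightlies, so treat this as illustrative only.

```rust
#![feature(core_intrinsics, const_eval_select)]

use core::intrinsics::const_eval_select;

const fn is_zero(x: usize) -> bool {
    // Chosen when this function is evaluated at compile time.
    const fn ct(x: usize) -> bool {
        x == 0
    }
    // Chosen when this function runs at runtime.
    fn rt(x: usize) -> bool {
        x == 0
    }
    // SAFETY: both implementations return the same result for every input.
    unsafe { const_eval_select((x,), ct, rt) }
}

const AT_COMPILE_TIME: bool = is_zero(0);

fn main() {
    assert!(AT_COMPILE_TIME);
    assert!(!is_zero(42));
}
```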
@@ -191,14 +202,11 @@ impl<T: ?Sized> *const T {
#[must_use]
#[inline(always)]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn addr(self) -> usize
- where
- T: Sized,
- {
+ pub fn addr(self) -> usize {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
// SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
// provenance).
- unsafe { mem::transmute(self) }
+ unsafe { mem::transmute(self.cast::<()>()) }
}
/// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
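[Editor's note] With the `T: Sized` bound dropped, `addr` (and the other strict-provenance accessors changed below) also applies to fat pointers, where only the data address is reported. A quick sketch, assuming a nightly with the `strict_provenance` feature gate named in this diff:

```rust
#![feature(strict_provenance)]

fn main() {
    let data = [1u8, 2, 3];
    let thin: *const u8 = data.as_ptr();
    let fat: *const [u8] = &data[..]; // data address plus length metadata
    // Only the data part participates in the address.
    assert_eq!(fat.addr(), thin.addr());
}
```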
@@ -228,12 +236,9 @@ impl<T: ?Sized> *const T {
#[must_use]
#[inline(always)]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn expose_addr(self) -> usize
- where
- T: Sized,
- {
+ pub fn expose_addr(self) -> usize {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
- self as usize
+ self.cast::<()>() as usize
}
/// Creates a new pointer with the given address.
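[Editor's note] `expose_addr` is likewise made available on unsized pointees by casting to a thin `*const ()` first. A sketch of the expose/reconstruct round trip it is designed for, assuming a nightly with `strict_provenance` and the `from_exposed_addr` constructor as named at the time of this diff:

```rust
#![feature(strict_provenance)]

use core::ptr;

fn main() {
    let x = 5u32;
    let p: *const u32 = &x;
    // Expose provenance and keep only the integer address around.
    let addr = p.expose_addr();
    // Later, rebuild a dereferenceable pointer from the exposed address.
    let q = ptr::from_exposed_addr::<u32>(addr);
    // SAFETY: `q` carries the provenance exposed from `p`, which points to `x`.
    assert_eq!(unsafe { *q }, 5);
}
```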
@@ -251,10 +256,7 @@ impl<T: ?Sized> *const T {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn with_addr(self, addr: usize) -> Self
- where
- T: Sized,
- {
+ pub fn with_addr(self, addr: usize) -> Self {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
//
// In the mean-time, this operation is defined to be "as if" it was
@@ -277,10 +279,7 @@ impl<T: ?Sized> *const T {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
- where
- T: Sized,
- {
+ pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
self.with_addr(f(self.addr()))
}
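[Editor's note] A common consumer of `with_addr`/`map_addr` is pointer tagging, which edits address bits while keeping provenance; with the `T: Sized` bounds gone it works uniformly for sized and unsized pointees. A sketch, assuming nightly `strict_provenance`:

```rust
#![feature(strict_provenance)]

fn main() {
    let x = 0u64;
    let p: *const u64 = &x;
    // `u64` is at least 8-aligned, so the low bit is free for a tag.
    let tagged = p.map_addr(|a| a | 1);
    assert_eq!(tagged.addr() & 1, 1);
    // Clearing the tag restores the original address; provenance never left.
    let untagged = tagged.map_addr(|a| a & !1);
    assert_eq!(untagged.addr(), p.addr());
}
```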
@@ -1008,7 +1007,7 @@ impl<T: ?Sized> *const T {
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
- #[inline]
+ #[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn sub(self, count: usize) -> Self
where
@@ -1173,7 +1172,7 @@ impl<T: ?Sized> *const T {
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
- #[inline]
+ #[inline(always)]
pub const fn wrapping_sub(self, count: usize) -> Self
where
T: Sized,
@@ -1350,26 +1349,6 @@ impl<T: ?Sized> *const T {
panic!("align_offset: align is not a power-of-two");
}
- #[cfg(bootstrap)]
- {
- fn rt_impl<T>(p: *const T, align: usize) -> usize {
- // SAFETY: `align` has been checked to be a power of 2 above
- unsafe { align_offset(p, align) }
- }
-
- const fn ctfe_impl<T>(_: *const T, _: usize) -> usize {
- usize::MAX
- }
-
- // SAFETY:
- // It is permissible for `align_offset` to always return `usize::MAX`,
- // algorithm correctness can not depend on `align_offset` returning non-max values.
- //
- // As such the behaviour can't change after replacing `align_offset` with `usize::MAX`, only performance can.
- unsafe { intrinsics::const_eval_select((self, align), ctfe_impl, rt_impl) }
- }
-
- #[cfg(not(bootstrap))]
{
// SAFETY: `align` has been checked to be a power of 2 above
unsafe { align_offset(self, align) }
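[Editor's note] The deleted bootstrap branch is a reminder of the `align_offset` contract: it may return `usize::MAX` to mean "cannot be aligned by offsetting", and callers must not rely on any particular non-MAX value for correctness. A sketch of a conforming caller using only the stable API:

```rust
/// Index of the first `align`-aligned byte in `bytes`, if any.
fn first_aligned_index(bytes: &[u8], align: usize) -> Option<usize> {
    let off = bytes.as_ptr().align_offset(align);
    // `usize::MAX` means "never aligned"; an offset past the end is useless too.
    (off != usize::MAX && off < bytes.len()).then_some(off)
}

fn main() {
    let buf = [0u8; 64];
    if let Some(i) = first_aligned_index(&buf, 8) {
        assert_eq!((buf.as_ptr() as usize + i) % 8, 0);
    }
}
```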
@@ -1406,8 +1385,7 @@ impl<T: ?Sized> *const T {
/// is never aligned if cast to a type with a stricter alignment than the reference's
/// underlying allocation.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1433,8 +1411,7 @@ impl<T: ?Sized> *const T {
/// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
/// pointer is aligned, even if the compiletime pointer wasn't aligned.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1460,8 +1437,7 @@ impl<T: ?Sized> *const T {
/// If a pointer is created from a fixed address, this function behaves the same during
/// runtime and compiletime.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1537,8 +1513,7 @@ impl<T: ?Sized> *const T {
/// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
/// cannot be stricter aligned than the reference's underlying allocation.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1563,8 +1538,7 @@ impl<T: ?Sized> *const T {
/// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
/// pointer is aligned, even if the compiletime pointer wasn't aligned.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1588,8 +1562,7 @@ impl<T: ?Sized> *const T {
/// If a pointer is created from a fixed address, this function behaves the same during
/// runtime and compiletime.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1613,11 +1586,22 @@ impl<T: ?Sized> *const T {
panic!("is_aligned_to: align is not a power-of-two");
}
- // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
- // The cast to `()` is used to
- // 1. deal with fat pointers; and
- // 2. ensure that `align_offset` doesn't actually try to compute an offset.
- self.cast::<()>().align_offset(align) == 0
+ #[inline]
+ fn runtime_impl(ptr: *const (), align: usize) -> bool {
+ ptr.addr() & (align - 1) == 0
+ }
+
+ #[inline]
+ const fn const_impl(ptr: *const (), align: usize) -> bool {
+ // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
+ // The cast to `()` is used to
+ // 1. deal with fat pointers; and
+ // 2. ensure that `align_offset` doesn't actually try to compute an offset.
+ ptr.align_offset(align) == 0
+ }
+
+ // SAFETY: The two versions are equivalent at runtime.
+ unsafe { const_eval_select((self.cast::<()>(), align), const_impl, runtime_impl) }
}
}
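[Editor's note] The new runtime path of `is_aligned_to` is the classic power-of-two mask check. A standalone sketch of that check, independent of the pointer methods above:

```rust
/// `align` must be a power of two, as `is_aligned_to` itself enforces.
fn addr_is_aligned_to(addr: usize, align: usize) -> bool {
    assert!(align.is_power_of_two(), "align is not a power-of-two");
    // For a power of two, all bits below the alignment must be zero.
    addr & (align - 1) == 0
}

fn main() {
    assert!(addr_is_aligned_to(0x1000, 16));
    assert!(!addr_is_aligned_to(0x1002, 4));
}
```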
diff --git a/library/core/src/ptr/metadata.rs b/library/core/src/ptr/metadata.rs
index a8604843e..2ea032d4a 100644
--- a/library/core/src/ptr/metadata.rs
+++ b/library/core/src/ptr/metadata.rs
@@ -50,7 +50,7 @@ use crate::hash::{Hash, Hasher};
///
/// [`to_raw_parts`]: *const::to_raw_parts
#[lang = "pointee_trait"]
-#[cfg_attr(not(bootstrap), rustc_deny_explicit_impl)]
+#[rustc_deny_explicit_impl]
pub trait Pointee {
/// The type for metadata in pointers and references to `Self`.
#[lang = "metadata_type"]
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 48b2e88da..1ad9af154 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -516,6 +516,27 @@ pub const fn null<T: ?Sized + Thin>() -> *const T {
from_raw_parts(invalid(0), ())
}
+/// Creates a null mutable raw pointer.
+///
+/// # Examples
+///
+/// ```
+/// use std::ptr;
+///
+/// let p: *mut i32 = ptr::null_mut();
+/// assert!(p.is_null());
+/// ```
+#[inline(always)]
+#[must_use]
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_promotable]
+#[rustc_const_stable(feature = "const_ptr_null", since = "1.24.0")]
+#[rustc_allow_const_fn_unstable(ptr_metadata)]
+#[rustc_diagnostic_item = "ptr_null_mut"]
+pub const fn null_mut<T: ?Sized + Thin>() -> *mut T {
+ from_raw_parts_mut(invalid_mut(0), ())
+}
+
/// Creates an invalid pointer with the given address.
///
/// This is different from `addr as *const T`, which creates a pointer that picks up a previously
@@ -663,25 +684,26 @@ where
addr as *mut T
}
-/// Creates a null mutable raw pointer.
+/// Convert a reference to a raw pointer.
///
-/// # Examples
-///
-/// ```
-/// use std::ptr;
+/// This is equivalent to `r as *const T`, but is a bit safer since it will never silently change
+/// type or mutability, in particular if the code is refactored.
+#[inline(always)]
+#[must_use]
+#[unstable(feature = "ptr_from_ref", issue = "106116")]
+pub fn from_ref<T: ?Sized>(r: &T) -> *const T {
+ r
+}
+
+/// Convert a mutable reference to a raw pointer.
///
-/// let p: *mut i32 = ptr::null_mut();
-/// assert!(p.is_null());
-/// ```
+/// This is equivalent to `r as *mut T`, but is a bit safer since it will never silently change
+/// type or mutability, in particular if the code is refactored.
#[inline(always)]
#[must_use]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_promotable]
-#[rustc_const_stable(feature = "const_ptr_null", since = "1.24.0")]
-#[rustc_allow_const_fn_unstable(ptr_metadata)]
-#[rustc_diagnostic_item = "ptr_null_mut"]
-pub const fn null_mut<T: ?Sized + Thin>() -> *mut T {
- from_raw_parts_mut(invalid_mut(0), ())
+#[unstable(feature = "ptr_from_ref", issue = "106116")]
+pub fn from_mut<T: ?Sized>(r: &mut T) -> *mut T {
+ r
}
/// Forms a raw slice from a pointer and a length.
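[Editor's note] The new `from_ref`/`from_mut` helpers exist because an `as` cast can silently change the pointee type or mutability during a refactor, while the function form pins both. A sketch of the intended usage, assuming the nightly `ptr_from_ref` feature named in this diff:

```rust
#![feature(ptr_from_ref)]

use core::ptr;

fn main() {
    let mut x: u32 = 7;
    // Unlike an `as` cast, these only accept references, so a refactor cannot
    // silently turn them into a different kind of cast.
    let p: *const u32 = ptr::from_ref(&x);
    let q: *mut u32 = ptr::from_mut(&mut x);
    assert_eq!(p as usize, q as usize);
}
```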
@@ -1679,7 +1701,7 @@ pub(crate) const unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usiz
// offset is not a multiple of `stride`, the input pointer was misaligned and no pointer
// offset will be able to produce a `p` aligned to the specified `a`.
//
- // The naive `-p (mod a)` equation inhibits LLVM's ability to select instructions
+ // The naive `-p (mod a)` equation inhibits LLVM's ability to select instructions
// like `lea`. We compute `(round_up_to_next_alignment(p, a) - p)` instead. This
// redistributes operations around the load-bearing, but pessimizing `and` instruction
// sufficiently for LLVM to be able to utilize the various optimizations it knows about.
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index c924a90b1..ed1e3bd48 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -1,6 +1,6 @@
use super::*;
use crate::cmp::Ordering::{self, Equal, Greater, Less};
-use crate::intrinsics;
+use crate::intrinsics::{self, const_eval_select};
use crate::slice::{self, SliceIndex};
impl<T: ?Sized> *mut T {
@@ -33,12 +33,23 @@ impl<T: ?Sized> *mut T {
#[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
#[inline]
pub const fn is_null(self) -> bool {
- // Compare via a cast to a thin pointer, so fat pointers are only
- // considering their "data" part for null-ness.
- match (self as *mut u8).guaranteed_eq(null_mut()) {
- None => false,
- Some(res) => res,
+ #[inline]
+ fn runtime_impl(ptr: *mut u8) -> bool {
+ ptr.addr() == 0
}
+
+ #[inline]
+ const fn const_impl(ptr: *mut u8) -> bool {
+ // Compare via a cast to a thin pointer, so fat pointers are only
+ // considering their "data" part for null-ness.
+ match (ptr).guaranteed_eq(null_mut()) {
+ None => false,
+ Some(res) => res,
+ }
+ }
+
+ // SAFETY: The two versions are equivalent at runtime.
+ unsafe { const_eval_select((self as *mut u8,), const_impl, runtime_impl) }
}
/// Casts to a pointer of another type.
@@ -197,14 +208,11 @@ impl<T: ?Sized> *mut T {
#[must_use]
#[inline(always)]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn addr(self) -> usize
- where
- T: Sized,
- {
+ pub fn addr(self) -> usize {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
// SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
// provenance).
- unsafe { mem::transmute(self) }
+ unsafe { mem::transmute(self.cast::<()>()) }
}
/// Gets the "address" portion of the pointer, and 'exposes' the "provenance" part for future
@@ -234,12 +242,9 @@ impl<T: ?Sized> *mut T {
#[must_use]
#[inline(always)]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn expose_addr(self) -> usize
- where
- T: Sized,
- {
+ pub fn expose_addr(self) -> usize {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
- self as usize
+ self.cast::<()>() as usize
}
/// Creates a new pointer with the given address.
@@ -257,10 +262,7 @@ impl<T: ?Sized> *mut T {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn with_addr(self, addr: usize) -> Self
- where
- T: Sized,
- {
+ pub fn with_addr(self, addr: usize) -> Self {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
//
// In the mean-time, this operation is defined to be "as if" it was
@@ -283,10 +285,7 @@ impl<T: ?Sized> *mut T {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
- where
- T: Sized,
- {
+ pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
self.with_addr(f(self.addr()))
}
@@ -1110,7 +1109,7 @@ impl<T: ?Sized> *mut T {
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
- #[inline]
+ #[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn sub(self, count: usize) -> Self
where
@@ -1275,7 +1274,7 @@ impl<T: ?Sized> *mut T {
#[stable(feature = "pointer_methods", since = "1.26.0")]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
- #[inline]
+ #[inline(always)]
pub const fn wrapping_sub(self, count: usize) -> Self
where
T: Sized,
@@ -1618,26 +1617,6 @@ impl<T: ?Sized> *mut T {
panic!("align_offset: align is not a power-of-two");
}
- #[cfg(bootstrap)]
- {
- fn rt_impl<T>(p: *mut T, align: usize) -> usize {
- // SAFETY: `align` has been checked to be a power of 2 above
- unsafe { align_offset(p, align) }
- }
-
- const fn ctfe_impl<T>(_: *mut T, _: usize) -> usize {
- usize::MAX
- }
-
- // SAFETY:
- // It is permissible for `align_offset` to always return `usize::MAX`,
- // algorithm correctness can not depend on `align_offset` returning non-max values.
- //
- // As such the behaviour can't change after replacing `align_offset` with `usize::MAX`, only performance can.
- unsafe { intrinsics::const_eval_select((self, align), ctfe_impl, rt_impl) }
- }
-
- #[cfg(not(bootstrap))]
{
// SAFETY: `align` has been checked to be a power of 2 above
unsafe { align_offset(self, align) }
@@ -1674,8 +1653,7 @@ impl<T: ?Sized> *mut T {
/// is never aligned if cast to a type with a stricter alignment than the reference's
/// underlying allocation.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
/// #![feature(const_mut_refs)]
@@ -1702,8 +1680,7 @@ impl<T: ?Sized> *mut T {
/// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
/// pointer is aligned, even if the compiletime pointer wasn't aligned.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1730,8 +1707,7 @@ impl<T: ?Sized> *mut T {
/// If a pointer is created from a fixed address, this function behaves the same during
/// runtime and compiletime.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1807,8 +1783,7 @@ impl<T: ?Sized> *mut T {
/// return `true` if the pointer is guaranteed to be aligned. This means that the pointer
/// cannot be stricter aligned than the reference's underlying allocation.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
/// #![feature(const_mut_refs)]
@@ -1834,8 +1809,7 @@ impl<T: ?Sized> *mut T {
/// Due to this behavior, it is possible that a runtime pointer derived from a compiletime
/// pointer is aligned, even if the compiletime pointer wasn't aligned.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1860,8 +1834,7 @@ impl<T: ?Sized> *mut T {
/// If a pointer is created from a fixed address, this function behaves the same during
/// runtime and compiletime.
///
- #[cfg_attr(bootstrap, doc = "```ignore")]
- #[cfg_attr(not(bootstrap), doc = "```")]
+ /// ```
/// #![feature(pointer_is_aligned)]
/// #![feature(const_pointer_is_aligned)]
///
@@ -1885,11 +1858,22 @@ impl<T: ?Sized> *mut T {
panic!("is_aligned_to: align is not a power-of-two");
}
- // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
- // The cast to `()` is used to
- // 1. deal with fat pointers; and
- // 2. ensure that `align_offset` doesn't actually try to compute an offset.
- self.cast::<()>().align_offset(align) == 0
+ #[inline]
+ fn runtime_impl(ptr: *mut (), align: usize) -> bool {
+ ptr.addr() & (align - 1) == 0
+ }
+
+ #[inline]
+ const fn const_impl(ptr: *mut (), align: usize) -> bool {
+ // We can't use the address of `self` in a `const fn`, so we use `align_offset` instead.
+ // The cast to `()` is used to
+ // 1. deal with fat pointers; and
+ // 2. ensure that `align_offset` doesn't actually try to compute an offset.
+ ptr.align_offset(align) == 0
+ }
+
+ // SAFETY: The two versions are equivalent at runtime.
+ unsafe { const_eval_select((self.cast::<()>(), align), const_impl, runtime_impl) }
}
}
diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs
index c4348169c..8c1a64886 100644
--- a/library/core/src/ptr/non_null.rs
+++ b/library/core/src/ptr/non_null.rs
@@ -268,10 +268,7 @@ impl<T: ?Sized> NonNull<T> {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn addr(self) -> NonZeroUsize
- where
- T: Sized,
- {
+ pub fn addr(self) -> NonZeroUsize {
// SAFETY: The pointer is guaranteed by the type to be non-null,
// meaning that the address will be non-zero.
unsafe { NonZeroUsize::new_unchecked(self.pointer.addr()) }
@@ -286,10 +283,7 @@ impl<T: ?Sized> NonNull<T> {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn with_addr(self, addr: NonZeroUsize) -> Self
- where
- T: Sized,
- {
+ pub fn with_addr(self, addr: NonZeroUsize) -> Self {
// SAFETY: The result of `ptr::from::with_addr` is non-null because `addr` is guaranteed to be non-zero.
unsafe { NonNull::new_unchecked(self.pointer.with_addr(addr.get()) as *mut _) }
}
@@ -303,10 +297,7 @@ impl<T: ?Sized> NonNull<T> {
#[must_use]
#[inline]
#[unstable(feature = "strict_provenance", issue = "95228")]
- pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self
- where
- T: Sized,
- {
+ pub fn map_addr(self, f: impl FnOnce(NonZeroUsize) -> NonZeroUsize) -> Self {
self.with_addr(f(self.addr()))
}
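[Editor's note] With the `T: Sized` bounds removed here as well, `NonNull`'s address accessors cover unsized pointees, and `addr` still hands back a `NonZeroUsize`. A sketch, assuming nightly `strict_provenance`:

```rust
#![feature(strict_provenance)]

use core::ptr::NonNull;

fn main() {
    let mut buf = [0u8; 4];
    // A fat `NonNull<[u8]>`: data address plus length metadata.
    let slice: NonNull<[u8]> = NonNull::from(&mut buf[..]);
    // The address of the data part is non-null by construction.
    assert!(slice.addr().get() >= 1);
    // `map_addr` keeps both the metadata and the provenance.
    let same = slice.map_addr(|a| a);
    assert_eq!(same.addr(), slice.addr());
}
```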
@@ -712,7 +703,7 @@ impl<T: ?Sized> const Clone for NonNull<T> {
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Copy for NonNull<T> {}
-#[unstable(feature = "coerce_unsized", issue = "27732")]
+#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]