Diffstat (limited to 'library/core/src')
-rw-r--r--  library/core/src/alloc/global.rs | 3
-rw-r--r--  library/core/src/alloc/layout.rs | 15
-rw-r--r--  library/core/src/alloc/mod.rs | 1
-rw-r--r--  library/core/src/any.rs | 13
-rw-r--r--  library/core/src/array/ascii.rs | 47
-rw-r--r--  library/core/src/array/iter.rs | 18
-rw-r--r--  library/core/src/array/mod.rs | 35
-rw-r--r--  library/core/src/ascii.rs | 75
-rw-r--r--  library/core/src/ascii/ascii_char.rs | 565
-rw-r--r--  library/core/src/bool.rs | 15
-rw-r--r--  library/core/src/borrow.rs | 17
-rw-r--r--  library/core/src/cell.rs | 53
-rw-r--r--  library/core/src/cell/lazy.rs | 28
-rw-r--r--  library/core/src/cell/once.rs | 3
-rw-r--r--  library/core/src/char/convert.rs | 12
-rw-r--r--  library/core/src/char/methods.rs | 76
-rw-r--r--  library/core/src/char/mod.rs | 252
-rw-r--r--  library/core/src/clone.rs | 23
-rw-r--r--  library/core/src/cmp.rs | 212
-rw-r--r--  library/core/src/convert/mod.rs | 49
-rw-r--r--  library/core/src/convert/num.rs | 24
-rw-r--r--  library/core/src/default.rs | 4
-rw-r--r--  library/core/src/escape.rs | 112
-rw-r--r--  library/core/src/ffi/c_str.rs | 21
-rw-r--r--  library/core/src/ffi/mod.rs | 20
-rw-r--r--  library/core/src/fmt/builders.rs | 14
-rw-r--r--  library/core/src/fmt/float.rs | 12
-rw-r--r--  library/core/src/fmt/mod.rs | 416
-rw-r--r--  library/core/src/fmt/num.rs | 15
-rw-r--r--  library/core/src/fmt/rt.rs | 212
-rw-r--r--  library/core/src/fmt/rt/v1.rs | 63
-rw-r--r--  library/core/src/future/into_future.rs | 1
-rw-r--r--  library/core/src/future/join.rs | 6
-rw-r--r--  library/core/src/future/mod.rs | 8
-rw-r--r--  library/core/src/hash/mod.rs | 95
-rw-r--r--  library/core/src/hash/sip.rs | 17
-rw-r--r--  library/core/src/hint.rs | 13
-rw-r--r--  library/core/src/internal_macros.rs | 71
-rw-r--r--  library/core/src/intrinsics.rs | 79
-rw-r--r--  library/core/src/intrinsics/mir.rs | 7
-rw-r--r--  library/core/src/iter/adapters/chain.rs | 2
-rw-r--r--  library/core/src/iter/adapters/filter.rs | 55
-rw-r--r--  library/core/src/iter/adapters/filter_map.rs | 62
-rw-r--r--  library/core/src/iter/adapters/flatten.rs | 68
-rw-r--r--  library/core/src/iter/range.rs | 20
-rw-r--r--  library/core/src/iter/sources/empty.rs | 3
-rw-r--r--  library/core/src/iter/traits/collect.rs | 13
-rw-r--r--  library/core/src/iter/traits/iterator.rs | 1
-rw-r--r--  library/core/src/lib.rs | 18
-rw-r--r--  library/core/src/macros/mod.rs | 3
-rw-r--r--  library/core/src/macros/panic.md | 2
-rw-r--r--  library/core/src/marker.rs | 211
-rw-r--r--  library/core/src/mem/manually_drop.rs | 6
-rw-r--r--  library/core/src/mem/maybe_uninit.rs | 14
-rw-r--r--  library/core/src/mem/mod.rs | 62
-rw-r--r--  library/core/src/mem/transmutability.rs | 6
-rw-r--r--  library/core/src/net/socket_addr.rs | 27
-rw-r--r--  library/core/src/num/error.rs | 6
-rw-r--r--  library/core/src/num/f32.rs | 36
-rw-r--r--  library/core/src/num/f64.rs | 36
-rw-r--r--  library/core/src/num/flt2dec/strategy/grisu.rs | 16
-rw-r--r--  library/core/src/num/int_macros.rs | 49
-rw-r--r--  library/core/src/num/mod.rs | 87
-rw-r--r--  library/core/src/num/nonzero.rs | 130
-rw-r--r--  library/core/src/num/uint_macros.rs | 4
-rw-r--r--  library/core/src/num/wrapping.rs | 168
-rw-r--r--  library/core/src/ops/arith.rs | 78
-rw-r--r--  library/core/src/ops/bit.rs | 69
-rw-r--r--  library/core/src/ops/control_flow.rs | 9
-rw-r--r--  library/core/src/ops/deref.rs | 8
-rw-r--r--  library/core/src/ops/drop.rs | 68
-rw-r--r--  library/core/src/ops/function.rs | 25
-rw-r--r--  library/core/src/ops/index.rs | 5
-rw-r--r--  library/core/src/ops/range.rs | 95
-rw-r--r--  library/core/src/ops/try_trait.rs | 19
-rw-r--r--  library/core/src/option.rs | 217
-rw-r--r--  library/core/src/panic.rs | 40
-rw-r--r--  library/core/src/panic/panic_info.rs | 2
-rw-r--r--  library/core/src/panicking.rs | 2
-rw-r--r--  library/core/src/pin.rs | 2
-rw-r--r--  library/core/src/primitive_docs.rs | 96
-rw-r--r--  library/core/src/ptr/alignment.rs | 16
-rw-r--r--  library/core/src/ptr/const_ptr.rs | 27
-rw-r--r--  library/core/src/ptr/mod.rs | 305
-rw-r--r--  library/core/src/ptr/mut_ptr.rs | 39
-rw-r--r--  library/core/src/ptr/non_null.rs | 30
-rw-r--r--  library/core/src/ptr/unique.rs | 14
-rw-r--r--  library/core/src/result.rs | 74
-rw-r--r--  library/core/src/slice/ascii.rs | 79
-rw-r--r--  library/core/src/slice/index.rs | 27
-rw-r--r--  library/core/src/slice/iter.rs | 42
-rw-r--r--  library/core/src/slice/iter/macros.rs | 75
-rw-r--r--  library/core/src/slice/memchr.rs | 14
-rw-r--r--  library/core/src/slice/mod.rs | 372
-rw-r--r--  library/core/src/slice/select.rs | 302
-rw-r--r--  library/core/src/slice/sort.rs | 181
-rw-r--r--  library/core/src/str/mod.rs | 35
-rw-r--r--  library/core/src/str/pattern.rs | 8
-rw-r--r--  library/core/src/str/traits.rs | 22
-rw-r--r--  library/core/src/sync/atomic.rs | 18
-rw-r--r--  library/core/src/task/mod.rs | 2
-rw-r--r--  library/core/src/task/poll.rs | 36
-rw-r--r--  library/core/src/task/ready.rs | 61
-rw-r--r--  library/core/src/task/wake.rs | 2
-rw-r--r--  library/core/src/time.rs | 26
-rw-r--r--  library/core/src/tuple.rs | 50
106 files changed, 3812 insertions, 2607 deletions
diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs
index 18da70451..c58211170 100644
--- a/library/core/src/alloc/global.rs
+++ b/library/core/src/alloc/global.rs
@@ -235,7 +235,8 @@ pub unsafe trait GlobalAlloc {
/// * `new_size` must be greater than zero.
///
/// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,
- /// must not overflow (i.e., the rounded value must be less than `usize::MAX`).
+ /// must not overflow isize (i.e., the rounded value must be less than or
+ /// equal to `isize::MAX`).
///
/// (Extension subtraits might provide more specific bounds on
/// behavior, e.g., guarantee a sentinel address or a null pointer
diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs
index ac3d84718..597303037 100644
--- a/library/core/src/alloc/layout.rs
+++ b/library/core/src/alloc/layout.rs
@@ -231,9 +231,8 @@ impl Layout {
/// Returns an error if the combination of `self.size()` and the given
/// `align` violates the conditions listed in [`Layout::from_size_align`].
#[stable(feature = "alloc_layout_manipulation", since = "1.44.0")]
- #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub const fn align_to(&self, align: usize) -> Result<Self, LayoutError> {
+ pub fn align_to(&self, align: usize) -> Result<Self, LayoutError> {
Layout::from_size_align(self.size(), cmp::max(self.align(), align))
}
@@ -315,9 +314,8 @@ impl Layout {
///
/// On arithmetic overflow, returns `LayoutError`.
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
- #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub const fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutError> {
+ pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutError> {
// This cannot overflow. Quoting from the invariant of Layout:
// > `size`, when rounded up to the nearest multiple of `align`,
// > must not overflow isize (i.e., the rounded value must be
@@ -376,9 +374,8 @@ impl Layout {
/// # assert_eq!(repr_c(&[u64, u32, u16, u32]), Ok((s, vec![0, 8, 12, 16])));
/// ```
#[stable(feature = "alloc_layout_manipulation", since = "1.44.0")]
- #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub const fn extend(&self, next: Self) -> Result<(Self, usize), LayoutError> {
+ pub fn extend(&self, next: Self) -> Result<(Self, usize), LayoutError> {
let new_align = cmp::max(self.align, next.align);
let pad = self.padding_needed_for(next.align());
@@ -403,9 +400,8 @@ impl Layout {
///
/// On arithmetic overflow, returns `LayoutError`.
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
- #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub const fn repeat_packed(&self, n: usize) -> Result<Self, LayoutError> {
+ pub fn repeat_packed(&self, n: usize) -> Result<Self, LayoutError> {
let size = self.size().checked_mul(n).ok_or(LayoutError)?;
// The safe constructor is called here to enforce the isize size limit.
Layout::from_size_alignment(size, self.align)
@@ -418,9 +414,8 @@ impl Layout {
///
/// On arithmetic overflow, returns `LayoutError`.
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
- #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub const fn extend_packed(&self, next: Self) -> Result<Self, LayoutError> {
+ pub fn extend_packed(&self, next: Self) -> Result<Self, LayoutError> {
let new_size = self.size().checked_add(next.size()).ok_or(LayoutError)?;
// The safe constructor is called here to enforce the isize size limit.
Layout::from_size_alignment(new_size, self.align)
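These hunks only drop the unstable `const` qualifiers; on stable Rust the methods behave as before. A small usage sketch with the stable `extend`/`pad_to_align` pair, computing the layout of a hypothetical `#[repr(C)] struct { a: u64, b: u32 }`:

```rust
use std::alloc::Layout;

fn main() {
    let a = Layout::new::<u64>();
    // `extend` appends the next field, returning its offset within the struct.
    let (partial, offset_b) = a.extend(Layout::new::<u32>()).unwrap();
    // Trailing padding is added separately, as for a real `#[repr(C)]` struct.
    let combined = partial.pad_to_align();
    assert_eq!(offset_b, 8);
    assert_eq!(combined.size(), 16);
    assert_eq!(combined.align(), 8);
}
```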
diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs
index ff390322d..d6ae2b821 100644
--- a/library/core/src/alloc/mod.rs
+++ b/library/core/src/alloc/mod.rs
@@ -105,7 +105,6 @@ impl fmt::Display for AllocError {
///
/// [*currently allocated*]: #currently-allocated-memory
#[unstable(feature = "allocator_api", issue = "32838")]
-#[const_trait]
pub unsafe trait Allocator {
/// Attempts to allocate a block of memory.
///
diff --git a/library/core/src/any.rs b/library/core/src/any.rs
index c27646b8f..7969f4055 100644
--- a/library/core/src/any.rs
+++ b/library/core/src/any.rs
@@ -662,13 +662,20 @@ impl dyn Any + Send + Sync {
/// While `TypeId` implements `Hash`, `PartialOrd`, and `Ord`, it is worth
/// noting that the hashes and ordering will vary between Rust releases. Beware
/// of relying on them inside of your code!
-#[derive(Clone, Copy, Debug, Hash, Eq)]
-#[derive_const(PartialEq, PartialOrd, Ord)]
+#[derive(Clone, Copy, Debug, Hash, Eq, PartialOrd, Ord)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct TypeId {
t: u64,
}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl PartialEq for TypeId {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.t == other.t
+ }
+}
+
impl TypeId {
/// Returns the `TypeId` of the type this generic function has been
/// instantiated with.
@@ -867,7 +874,7 @@ where
///
/// A data provider provides values by calling this type's provide methods.
#[unstable(feature = "provide_any", issue = "96024")]
-#[repr(transparent)]
+#[cfg_attr(not(doc), repr(transparent))] // work around https://github.com/rust-lang/rust/issues/90435
pub struct Demand<'a>(dyn Erased<'a> + 'a);
impl<'a> Demand<'a> {
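With the `derive_const` derives replaced by plain impls, runtime `TypeId` comparison is unchanged. A small illustration; the helper `is_string` is hypothetical:

```rust
use std::any::TypeId;

// `PartialEq` on `TypeId` (now a hand-written impl rather than a const derive)
// still just compares the underlying ids.
fn is_string<T: 'static>() -> bool {
    TypeId::of::<T>() == TypeId::of::<String>()
}

fn main() {
    assert!(is_string::<String>());
    assert!(!is_string::<&str>());
}
```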
diff --git a/library/core/src/array/ascii.rs b/library/core/src/array/ascii.rs
new file mode 100644
index 000000000..3fea9a440
--- /dev/null
+++ b/library/core/src/array/ascii.rs
@@ -0,0 +1,47 @@
+use crate::ascii;
+
+#[cfg(not(test))]
+impl<const N: usize> [u8; N] {
+ /// Converts this array of bytes into an array of ASCII characters,
+ /// or returns `None` if any of the characters is non-ASCII.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(ascii_char)]
+ /// #![feature(const_option)]
+ ///
+ /// const HEX_DIGITS: [std::ascii::Char; 16] =
+ /// *b"0123456789abcdef".as_ascii().unwrap();
+ ///
+ /// assert_eq!(HEX_DIGITS[1].as_str(), "1");
+ /// assert_eq!(HEX_DIGITS[10].as_str(), "a");
+ /// ```
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[must_use]
+ #[inline]
+ pub const fn as_ascii(&self) -> Option<&[ascii::Char; N]> {
+ if self.is_ascii() {
+ // SAFETY: Just checked that it's ASCII
+ Some(unsafe { self.as_ascii_unchecked() })
+ } else {
+ None
+ }
+ }
+
+ /// Converts this array of bytes into an array of ASCII characters,
+ /// without checking whether they're valid.
+ ///
+ /// # Safety
+ ///
+ /// Every byte in the array must be in `0..=127`, or else this is UB.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[must_use]
+ #[inline]
+ pub const unsafe fn as_ascii_unchecked(&self) -> &[ascii::Char; N] {
+ let byte_ptr: *const [u8; N] = self;
+ let ascii_ptr = byte_ptr as *const [ascii::Char; N];
+ // SAFETY: The caller promised all the bytes are ASCII
+ unsafe { &*ascii_ptr }
+ }
+}
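The new array methods are gated on the unstable `ascii_char` feature; a stable-Rust analogue of the same check-once-then-reuse idea, going through `str` instead of `[ascii::Char; N]` (the helper name `as_ascii_str` is ours):

```rust
fn as_ascii_str(bytes: &[u8]) -> Option<&str> {
    if bytes.is_ascii() {
        // SAFETY: every byte is <= 0x7F, so the slice is valid single-byte UTF-8.
        Some(unsafe { std::str::from_utf8_unchecked(bytes) })
    } else {
        None
    }
}

fn main() {
    assert_eq!(as_ascii_str(b"0123456789abcdef"), Some("0123456789abcdef"));
    assert_eq!(as_ascii_str(&[0xFF]), None);
}
```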
diff --git a/library/core/src/array/iter.rs b/library/core/src/array/iter.rs
index 73e2c2cfb..587877dff 100644
--- a/library/core/src/array/iter.rs
+++ b/library/core/src/array/iter.rs
@@ -3,8 +3,9 @@
use crate::num::NonZeroUsize;
use crate::{
fmt,
+ intrinsics::transmute_unchecked,
iter::{self, ExactSizeIterator, FusedIterator, TrustedLen},
- mem::{self, MaybeUninit},
+ mem::MaybeUninit,
ops::{IndexRange, Range},
ptr,
};
@@ -63,18 +64,11 @@ impl<T, const N: usize> IntoIterator for [T; N] {
// an array of `T`.
//
// With that, this initialization satisfies the invariants.
-
- // FIXME(LukasKalbertodt): actually use `mem::transmute` here, once it
- // works with const generics:
- // `mem::transmute::<[T; N], [MaybeUninit<T>; N]>(array)`
//
- // Until then, we can use `mem::transmute_copy` to create a bitwise copy
- // as a different type, then forget `array` so that it is not dropped.
- unsafe {
- let iter = IntoIter { data: mem::transmute_copy(&self), alive: IndexRange::zero_to(N) };
- mem::forget(self);
- iter
- }
+ // FIXME: If normal `transmute` ever gets smart enough to allow this
+ // directly, use it instead of `transmute_unchecked`.
+ let data: [MaybeUninit<T>; N] = unsafe { transmute_unchecked(self) };
+ IntoIter { data, alive: IndexRange::zero_to(N) }
}
}
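`transmute_unchecked` is an internal intrinsic, so it cannot be used outside `core`; the pattern it replaces can still be written on stable with `transmute_copy` plus `forget`, as sketched below (the helper `into_uninit` is hypothetical):

```rust
use std::mem::{self, MaybeUninit};

fn into_uninit<T, const N: usize>(array: [T; N]) -> [MaybeUninit<T>; N] {
    // SAFETY: `MaybeUninit<T>` has the same layout as `T`, so the two array
    // types have the same layout and a bitwise copy is valid.
    let copy = unsafe { mem::transmute_copy::<[T; N], [MaybeUninit<T>; N]>(&array) };
    // Forget the original so its elements are not dropped a second time.
    mem::forget(array);
    copy
}

fn main() {
    let data = into_uninit([String::from("a"), String::from("b")]);
    // The slots are still initialized; take each value out exactly once.
    let a = unsafe { data[0].as_ptr().read() };
    let b = unsafe { data[1].as_ptr().read() };
    assert_eq!((a.as_str(), b.as_str()), ("a", "b"));
}
```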
diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs
index 1643842d6..fec92320a 100644
--- a/library/core/src/array/mod.rs
+++ b/library/core/src/array/mod.rs
@@ -17,6 +17,7 @@ use crate::ops::{
};
use crate::slice::{Iter, IterMut};
+mod ascii;
mod drain;
mod equality;
mod iter;
@@ -148,8 +149,7 @@ impl Error for TryFromSliceError {
}
#[stable(feature = "try_from_slice_error", since = "1.36.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<Infallible> for TryFromSliceError {
+impl From<Infallible> for TryFromSliceError {
fn from(x: Infallible) -> TryFromSliceError {
match x {}
}
@@ -172,16 +172,14 @@ impl<T, const N: usize> AsMut<[T]> for [T; N] {
}
#[stable(feature = "array_borrow", since = "1.4.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T, const N: usize> const Borrow<[T]> for [T; N] {
+impl<T, const N: usize> Borrow<[T]> for [T; N] {
fn borrow(&self) -> &[T] {
self
}
}
#[stable(feature = "array_borrow", since = "1.4.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T, const N: usize> const BorrowMut<[T]> for [T; N] {
+impl<T, const N: usize> BorrowMut<[T]> for [T; N] {
fn borrow_mut(&mut self) -> &mut [T] {
self
}
@@ -206,6 +204,7 @@ where
{
type Error = TryFromSliceError;
+ #[inline]
fn try_from(slice: &[T]) -> Result<[T; N], TryFromSliceError> {
<&Self>::try_from(slice).map(|r| *r)
}
@@ -230,6 +229,7 @@ where
{
type Error = TryFromSliceError;
+ #[inline]
fn try_from(slice: &mut [T]) -> Result<[T; N], TryFromSliceError> {
<Self>::try_from(&*slice)
}
@@ -251,7 +251,8 @@ where
impl<'a, T, const N: usize> TryFrom<&'a [T]> for &'a [T; N] {
type Error = TryFromSliceError;
- fn try_from(slice: &[T]) -> Result<&[T; N], TryFromSliceError> {
+ #[inline]
+ fn try_from(slice: &'a [T]) -> Result<&'a [T; N], TryFromSliceError> {
if slice.len() == N {
let ptr = slice.as_ptr() as *const [T; N];
// SAFETY: ok because we just checked that the length fits
@@ -278,7 +279,8 @@ impl<'a, T, const N: usize> TryFrom<&'a [T]> for &'a [T; N] {
impl<'a, T, const N: usize> TryFrom<&'a mut [T]> for &'a mut [T; N] {
type Error = TryFromSliceError;
- fn try_from(slice: &mut [T]) -> Result<&mut [T; N], TryFromSliceError> {
+ #[inline]
+ fn try_from(slice: &'a mut [T]) -> Result<&'a mut [T; N], TryFromSliceError> {
if slice.len() == N {
let ptr = slice.as_mut_ptr() as *mut [T; N];
// SAFETY: ok because we just checked that the length fits
@@ -293,7 +295,6 @@ impl<'a, T, const N: usize> TryFrom<&'a mut [T]> for &'a mut [T; N] {
/// as required by the `Borrow` implementation.
///
/// ```
-/// #![feature(build_hasher_simple_hash_one)]
/// use std::hash::BuildHasher;
///
/// let b = std::collections::hash_map::RandomState::new();
@@ -336,10 +337,9 @@ impl<'a, T, const N: usize> IntoIterator for &'a mut [T; N] {
}
#[stable(feature = "index_trait_on_arrays", since = "1.50.0")]
-#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-impl<T, I, const N: usize> const Index<I> for [T; N]
+impl<T, I, const N: usize> Index<I> for [T; N]
where
- [T]: ~const Index<I>,
+ [T]: Index<I>,
{
type Output = <[T] as Index<I>>::Output;
@@ -350,10 +350,9 @@ where
}
#[stable(feature = "index_trait_on_arrays", since = "1.50.0")]
-#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-impl<T, I, const N: usize> const IndexMut<I> for [T; N]
+impl<T, I, const N: usize> IndexMut<I> for [T; N]
where
- [T]: ~const IndexMut<I>,
+ [T]: IndexMut<I>,
{
#[inline]
fn index_mut(&mut self, index: I) -> &mut Self::Output {
@@ -435,8 +434,7 @@ impl<T: Copy> SpecArrayClone for T {
macro_rules! array_impl_default {
{$n:expr, $t:ident $($ts:ident)*} => {
#[stable(since = "1.4.0", feature = "array_default")]
- #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
- impl<T> const Default for [T; $n] where T: ~const Default {
+ impl<T> Default for [T; $n] where T: Default {
fn default() -> [T; $n] {
[$t::default(), $($ts::default()),*]
}
@@ -445,8 +443,7 @@ macro_rules! array_impl_default {
};
{$n:expr,} => {
#[stable(since = "1.4.0", feature = "array_default")]
- #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
- impl<T> const Default for [T; $n] {
+ impl<T> Default for [T; $n] {
fn default() -> [T; $n] { [] }
}
};
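The lifetime annotations added above only spell out what the impl headers already require; the conversions themselves are unchanged. A quick stable-Rust usage check:

```rust
fn main() {
    let bytes = [1u8, 2, 3, 4, 5, 6, 7, 8];

    // The by-value impl copies out of the slice...
    let head: [u8; 4] = <[u8; 4]>::try_from(&bytes[..4]).unwrap();
    assert_eq!(head, [1, 2, 3, 4]);

    // ...while the reference impls borrow, with the `'a` lifetimes fixed above.
    let tail: &[u8; 4] = <&[u8; 4]>::try_from(&bytes[4..]).unwrap();
    assert_eq!(tail, &[5, 6, 7, 8]);

    // Length mismatches report a `TryFromSliceError`.
    assert!(<&[u8; 4]>::try_from(&bytes[..3]).is_err());
}
```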
diff --git a/library/core/src/ascii.rs b/library/core/src/ascii.rs
index 8a4cb78cc..ef8e4d098 100644
--- a/library/core/src/ascii.rs
+++ b/library/core/src/ascii.rs
@@ -9,10 +9,14 @@
#![stable(feature = "core_ascii", since = "1.26.0")]
+use crate::escape;
use crate::fmt;
use crate::iter::FusedIterator;
-use crate::ops::Range;
-use crate::str::from_utf8_unchecked;
+use crate::num::NonZeroUsize;
+
+mod ascii_char;
+#[unstable(feature = "ascii_char", issue = "110998")]
+pub use ascii_char::AsciiChar as Char;
/// An iterator over the escaped version of a byte.
///
@@ -21,10 +25,7 @@ use crate::str::from_utf8_unchecked;
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
-pub struct EscapeDefault {
- range: Range<u8>,
- data: [u8; 4],
-}
+pub struct EscapeDefault(escape::EscapeIterInner<4>);
/// Returns an iterator that produces an escaped version of a `u8`.
///
@@ -90,21 +91,9 @@ pub struct EscapeDefault {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_default(c: u8) -> EscapeDefault {
- let (data, len) = match c {
- b'\t' => ([b'\\', b't', 0, 0], 2),
- b'\r' => ([b'\\', b'r', 0, 0], 2),
- b'\n' => ([b'\\', b'n', 0, 0], 2),
- b'\\' => ([b'\\', b'\\', 0, 0], 2),
- b'\'' => ([b'\\', b'\'', 0, 0], 2),
- b'"' => ([b'\\', b'"', 0, 0], 2),
- b'\x20'..=b'\x7e' => ([c, 0, 0, 0], 1),
- _ => {
- let hex_digits: &[u8; 16] = b"0123456789abcdef";
- ([b'\\', b'x', hex_digits[(c >> 4) as usize], hex_digits[(c & 0xf) as usize]], 4)
- }
- };
-
- return EscapeDefault { range: 0..len, data };
+ let mut data = [Char::Null; 4];
+ let range = escape::escape_ascii_into(&mut data, c);
+ EscapeDefault(escape::EscapeIterInner::new(data, range))
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -113,33 +102,59 @@ impl Iterator for EscapeDefault {
#[inline]
fn next(&mut self) -> Option<u8> {
- self.range.next().map(|i| self.data[i as usize])
+ self.0.next()
}
+
+ #[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- self.range.size_hint()
+ let n = self.0.len();
+ (n, Some(n))
}
+
+ #[inline]
+ fn count(self) -> usize {
+ self.0.len()
+ }
+
+ #[inline]
fn last(mut self) -> Option<u8> {
- self.next_back()
+ self.0.next_back()
+ }
+
+ #[inline]
+ fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
+ self.0.advance_by(n)
}
}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl DoubleEndedIterator for EscapeDefault {
+ #[inline]
fn next_back(&mut self) -> Option<u8> {
- self.range.next_back().map(|i| self.data[i as usize])
+ self.0.next_back()
+ }
+
+ #[inline]
+ fn advance_back_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
+ self.0.advance_back_by(n)
}
}
+
#[stable(feature = "rust1", since = "1.0.0")]
-impl ExactSizeIterator for EscapeDefault {}
+impl ExactSizeIterator for EscapeDefault {
+ #[inline]
+ fn len(&self) -> usize {
+ self.0.len()
+ }
+}
+
#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EscapeDefault {}
#[stable(feature = "ascii_escape_display", since = "1.39.0")]
impl fmt::Display for EscapeDefault {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- // SAFETY: ok because `escape_default` created only valid utf-8 data
- f.write_str(unsafe {
- from_utf8_unchecked(&self.data[(self.range.start as usize)..(self.range.end as usize)])
- })
+ f.write_str(self.0.as_str())
}
}
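The rewrite swaps the iterator's internals for `escape::EscapeIterInner`; its observable output is unchanged. A stable-Rust spot check:

```rust
use std::ascii;

fn main() {
    // Control characters get two-byte escapes...
    let escaped: String = ascii::escape_default(b'\n').map(char::from).collect();
    assert_eq!(escaped, "\\n");

    // ...non-printable bytes become `\xNN`, and `Display` writes the same text.
    assert_eq!(ascii::escape_default(0x9d).to_string(), "\\x9d");
    assert_eq!(ascii::escape_default(b'a').len(), 1);
}
```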
diff --git a/library/core/src/ascii/ascii_char.rs b/library/core/src/ascii/ascii_char.rs
new file mode 100644
index 000000000..f093a0990
--- /dev/null
+++ b/library/core/src/ascii/ascii_char.rs
@@ -0,0 +1,565 @@
+//! This uses the name `AsciiChar`, even though it's not exposed that way right now,
+//! because it avoids a whole bunch of "are you sure you didn't mean `char`?"
+//! suggestions from rustc if you get anything slightly wrong in here, and overall
+//! helps with clarity as we're also referring to `char` intentionally in here.
+
+use crate::fmt;
+use crate::mem::transmute;
+
+/// One of the 128 Unicode characters from U+0000 through U+007F,
+/// often known as the [ASCII] subset.
+///
+/// Officially, this is the first [block] in Unicode, _Basic Latin_.
+/// For details, see the [*C0 Controls and Basic Latin*][chart] code chart.
+///
+/// This block was based on older 7-bit character code standards such as
+/// ANSI X3.4-1977, ISO 646-1973, and [NIST FIPS 1-2].
+///
+/// # When to use this
+///
+/// The main advantage of this subset is that it's always valid UTF-8. As such,
+/// the `&[ascii::Char]` -> `&str` conversion function (as well as other related
+/// ones) are O(1): *no* runtime checks are needed.
+///
+/// If you're consuming strings, you should usually handle Unicode and thus
+/// accept `str`s, not limit yourself to `ascii::Char`s.
+///
+/// However, certain formats are intentionally designed to produce ASCII-only
+/// output in order to be 8-bit-clean. In those cases, it can be simpler and
+/// faster to generate `ascii::Char`s instead of dealing with the variable width
+/// properties of general UTF-8 encoded strings, while still allowing the result
+/// to be used freely with other Rust things that deal in general `str`s.
+///
+/// For example, a UUID library might offer a way to produce the string
+/// representation of a UUID as an `[ascii::Char; 36]` to avoid memory
+/// allocation yet still allow it to be used as UTF-8 via `as_str` without
+/// paying for validation (or needing `unsafe` code) the way it would if it
+/// were provided as a `[u8; 36]`.
+///
+/// # Layout
+///
+/// This type is guaranteed to have a size and alignment of 1 byte.
+///
+/// # Names
+///
+/// The variants on this type are [Unicode names][NamesList] of the characters
+/// in upper camel case, with a few tweaks:
+/// - For `<control>` characters, the primary alias name is used.
+/// - `LATIN` is dropped, as this block has no non-Latin letters.
+/// - `LETTER` is dropped, as `CAPITAL`/`SMALL` suffices in this block.
+/// - `DIGIT`s use a single digit rather than writing out `ZERO`, `ONE`, etc.
+///
+/// [ASCII]: https://www.unicode.org/glossary/index.html#ASCII
+/// [block]: https://www.unicode.org/glossary/index.html#block
+/// [chart]: https://www.unicode.org/charts/PDF/U0000.pdf
+/// [NIST FIPS 1-2]: https://nvlpubs.nist.gov/nistpubs/Legacy/FIPS/fipspub1-2-1977.pdf
+/// [NamesList]: https://www.unicode.org/Public/15.0.0/ucd/NamesList.txt
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[unstable(feature = "ascii_char", issue = "110998")]
+#[repr(u8)]
+pub enum AsciiChar {
+ /// U+0000
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Null = 0,
+ /// U+0001
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ StartOfHeading = 1,
+ /// U+0002
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ StartOfText = 2,
+ /// U+0003
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ EndOfText = 3,
+ /// U+0004
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ EndOfTransmission = 4,
+ /// U+0005
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Enquiry = 5,
+ /// U+0006
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Acknowledge = 6,
+ /// U+0007
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Bell = 7,
+ /// U+0008
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Backspace = 8,
+ /// U+0009
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CharacterTabulation = 9,
+ /// U+000A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LineFeed = 10,
+ /// U+000B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LineTabulation = 11,
+ /// U+000C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ FormFeed = 12,
+ /// U+000D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CarriageReturn = 13,
+ /// U+000E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ ShiftOut = 14,
+ /// U+000F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ ShiftIn = 15,
+ /// U+0010
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ DataLinkEscape = 16,
+ /// U+0011
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ DeviceControlOne = 17,
+ /// U+0012
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ DeviceControlTwo = 18,
+ /// U+0013
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ DeviceControlThree = 19,
+ /// U+0014
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ DeviceControlFour = 20,
+ /// U+0015
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ NegativeAcknowledge = 21,
+ /// U+0016
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SynchronousIdle = 22,
+ /// U+0017
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ EndOfTransmissionBlock = 23,
+ /// U+0018
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Cancel = 24,
+ /// U+0019
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ EndOfMedium = 25,
+ /// U+001A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Substitute = 26,
+ /// U+001B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Escape = 27,
+ /// U+001C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ InformationSeparatorFour = 28,
+ /// U+001D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ InformationSeparatorThree = 29,
+ /// U+001E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ InformationSeparatorTwo = 30,
+ /// U+001F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ InformationSeparatorOne = 31,
+ /// U+0020
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Space = 32,
+ /// U+0021
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ ExclamationMark = 33,
+ /// U+0022
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ QuotationMark = 34,
+ /// U+0023
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ NumberSign = 35,
+ /// U+0024
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ DollarSign = 36,
+ /// U+0025
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ PercentSign = 37,
+ /// U+0026
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Ampersand = 38,
+ /// U+0027
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Apostrophe = 39,
+ /// U+0028
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LeftParenthesis = 40,
+ /// U+0029
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ RightParenthesis = 41,
+ /// U+002A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Asterisk = 42,
+ /// U+002B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ PlusSign = 43,
+ /// U+002C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Comma = 44,
+ /// U+002D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ HyphenMinus = 45,
+ /// U+002E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ FullStop = 46,
+ /// U+002F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Solidus = 47,
+ /// U+0030
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit0 = 48,
+ /// U+0031
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit1 = 49,
+ /// U+0032
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit2 = 50,
+ /// U+0033
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit3 = 51,
+ /// U+0034
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit4 = 52,
+ /// U+0035
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit5 = 53,
+ /// U+0036
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit6 = 54,
+ /// U+0037
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit7 = 55,
+ /// U+0038
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit8 = 56,
+ /// U+0039
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Digit9 = 57,
+ /// U+003A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Colon = 58,
+ /// U+003B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Semicolon = 59,
+ /// U+003C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LessThanSign = 60,
+ /// U+003D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ EqualsSign = 61,
+ /// U+003E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ GreaterThanSign = 62,
+ /// U+003F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ QuestionMark = 63,
+ /// U+0040
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CommercialAt = 64,
+ /// U+0041
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalA = 65,
+ /// U+0042
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalB = 66,
+ /// U+0043
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalC = 67,
+ /// U+0044
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalD = 68,
+ /// U+0045
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalE = 69,
+ /// U+0046
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalF = 70,
+ /// U+0047
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalG = 71,
+ /// U+0048
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalH = 72,
+ /// U+0049
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalI = 73,
+ /// U+004A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalJ = 74,
+ /// U+004B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalK = 75,
+ /// U+004C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalL = 76,
+ /// U+004D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalM = 77,
+ /// U+004E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalN = 78,
+ /// U+004F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalO = 79,
+ /// U+0050
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalP = 80,
+ /// U+0051
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalQ = 81,
+ /// U+0052
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalR = 82,
+ /// U+0053
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalS = 83,
+ /// U+0054
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalT = 84,
+ /// U+0055
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalU = 85,
+ /// U+0056
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalV = 86,
+ /// U+0057
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalW = 87,
+ /// U+0058
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalX = 88,
+ /// U+0059
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalY = 89,
+ /// U+005A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CapitalZ = 90,
+ /// U+005B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LeftSquareBracket = 91,
+ /// U+005C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ ReverseSolidus = 92,
+ /// U+005D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ RightSquareBracket = 93,
+ /// U+005E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ CircumflexAccent = 94,
+ /// U+005F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LowLine = 95,
+ /// U+0060
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ GraveAccent = 96,
+ /// U+0061
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallA = 97,
+ /// U+0062
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallB = 98,
+ /// U+0063
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallC = 99,
+ /// U+0064
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallD = 100,
+ /// U+0065
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallE = 101,
+ /// U+0066
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallF = 102,
+ /// U+0067
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallG = 103,
+ /// U+0068
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallH = 104,
+ /// U+0069
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallI = 105,
+ /// U+006A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallJ = 106,
+ /// U+006B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallK = 107,
+ /// U+006C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallL = 108,
+ /// U+006D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallM = 109,
+ /// U+006E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallN = 110,
+ /// U+006F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallO = 111,
+ /// U+0070
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallP = 112,
+ /// U+0071
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallQ = 113,
+ /// U+0072
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallR = 114,
+ /// U+0073
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallS = 115,
+ /// U+0074
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallT = 116,
+ /// U+0075
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallU = 117,
+ /// U+0076
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallV = 118,
+ /// U+0077
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallW = 119,
+ /// U+0078
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallX = 120,
+ /// U+0079
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallY = 121,
+ /// U+007A
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ SmallZ = 122,
+ /// U+007B
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ LeftCurlyBracket = 123,
+ /// U+007C
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ VerticalLine = 124,
+ /// U+007D
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ RightCurlyBracket = 125,
+ /// U+007E
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Tilde = 126,
+ /// U+007F
+ #[unstable(feature = "ascii_char_variants", issue = "110998")]
+ Delete = 127,
+}
+
+impl AsciiChar {
+ /// Creates an ASCII character from the byte `b`,
+ /// or returns `None` if it's too large.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn from_u8(b: u8) -> Option<Self> {
+ if b <= 127 {
+ // SAFETY: Just checked that `b` is in-range
+ Some(unsafe { Self::from_u8_unchecked(b) })
+ } else {
+ None
+ }
+ }
+
+ /// Creates an ASCII character from the byte `b`,
+ /// without checking whether it's valid.
+ ///
+ /// # Safety
+ ///
+ /// `b` must be in `0..=127`, or else this is UB.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const unsafe fn from_u8_unchecked(b: u8) -> Self {
+ // SAFETY: Our safety precondition is that `b` is in-range.
+ unsafe { transmute(b) }
+ }
+
+ /// When passed the *number* `0`, `1`, …, `9`, returns the *character*
+ /// `'0'`, `'1'`, …, `'9'` respectively.
+ ///
+ /// If `d >= 10`, returns `None`.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn digit(d: u8) -> Option<Self> {
+ if d < 10 {
+ // SAFETY: Just checked it's in-range.
+ Some(unsafe { Self::digit_unchecked(d) })
+ } else {
+ None
+ }
+ }
+
+ /// When passed the *number* `0`, `1`, …, `9`, returns the *character*
+ /// `'0'`, `'1'`, …, `'9'` respectively, without checking that it's in-range.
+ ///
+ /// # Safety
+ ///
+ /// This is immediate UB if called with `d > 64`.
+ ///
+ /// If `d >= 10` and `d <= 64`, this is allowed to return any value or panic.
+ /// Notably, it should not be expected to return hex digits, or any other
+ /// reasonable extension of the decimal digits.
+ ///
+ /// (This loose safety condition is intended to simplify soundness proofs
+ /// when writing code using this method, since the implementation doesn't
+ /// need something really specific, not to make those other arguments do
+ /// something useful. It might be tightened before stabilization.)
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const unsafe fn digit_unchecked(d: u8) -> Self {
+ debug_assert!(d < 10);
+
+ // SAFETY: `'0'` through `'9'` are U+0030 through U+0039,
+ // so because `d` must be 64 or less the addition can return at most
+ // 112 (0x70), which doesn't overflow and is within the ASCII range.
+ unsafe {
+ let byte = b'0'.unchecked_add(d);
+ Self::from_u8_unchecked(byte)
+ }
+ }
+
+ /// Gets this ASCII character as a byte.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_u8(self) -> u8 {
+ self as u8
+ }
+
+ /// Gets this ASCII character as a `char` Unicode Scalar Value.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_char(self) -> char {
+ self as u8 as char
+ }
+
+ /// Views this ASCII character as a one-code-unit UTF-8 `str`.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_str(&self) -> &str {
+ crate::slice::from_ref(self).as_str()
+ }
+}
+
+impl [AsciiChar] {
+ /// Views this slice of ASCII characters as a UTF-8 `str`.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_str(&self) -> &str {
+ let ascii_ptr: *const Self = self;
+ let str_ptr = ascii_ptr as *const str;
+ // SAFETY: Each ASCII codepoint in UTF-8 is encoded as one single-byte
+ // code unit having the same value as the ASCII byte.
+ unsafe { &*str_ptr }
+ }
+
+ /// Views this slice of ASCII characters as a slice of `u8` bytes.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_bytes(&self) -> &[u8] {
+ self.as_str().as_bytes()
+ }
+}
+
+#[unstable(feature = "ascii_char", issue = "110998")]
+impl fmt::Display for AsciiChar {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ <str as fmt::Display>::fmt(self.as_str(), f)
+ }
+}
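A stable-Rust sketch of the arithmetic `digit_unchecked` relies on: `'0'..='9'` are the contiguous code points U+0030..=U+0039, so adding a decimal digit to `b'0'` yields its character. The helper `digit_char` is hypothetical:

```rust
fn digit_char(d: u8) -> Option<char> {
    // Same check as `AsciiChar::digit`, but returning a plain `char`.
    if d < 10 { Some((b'0' + d) as char) } else { None }
}

fn main() {
    assert_eq!(digit_char(7), Some('7'));
    assert_eq!(digit_char(10), None);
}
```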
diff --git a/library/core/src/bool.rs b/library/core/src/bool.rs
index db1c505ba..03cdff9b1 100644
--- a/library/core/src/bool.rs
+++ b/library/core/src/bool.rs
@@ -1,7 +1,5 @@
//! impl bool {}
-use crate::marker::Destruct;
-
impl bool {
/// Returns `Some(t)` if the `bool` is [`true`](../std/keyword.true.html),
/// or `None` otherwise.
@@ -31,12 +29,8 @@ impl bool {
/// assert_eq!(a, 2);
/// ```
#[stable(feature = "bool_to_option", since = "1.62.0")]
- #[rustc_const_unstable(feature = "const_bool_to_option", issue = "91917")]
#[inline]
- pub const fn then_some<T>(self, t: T) -> Option<T>
- where
- T: ~const Destruct,
- {
+ pub fn then_some<T>(self, t: T) -> Option<T> {
if self { Some(t) } else { None }
}
@@ -61,13 +55,8 @@ impl bool {
/// assert_eq!(a, 1);
/// ```
#[stable(feature = "lazy_bool_to_option", since = "1.50.0")]
- #[rustc_const_unstable(feature = "const_bool_to_option", issue = "91917")]
#[inline]
- pub const fn then<T, F>(self, f: F) -> Option<T>
- where
- F: ~const FnOnce() -> T,
- F: ~const Destruct,
- {
+ pub fn then<T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
if self { Some(f()) } else { None }
}
}
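Dropping the `~const` bounds does not change runtime semantics: `then_some` is eager and `then` is lazy. A small stable-Rust example; the `expensive` function is a stand-in:

```rust
fn expensive() -> i32 {
    42
}

fn main() {
    // `then_some` always takes its argument by value.
    assert_eq!(true.then_some(5), Some(5));
    assert_eq!(false.then_some(5), None);

    // `then` only runs the closure when the receiver is `true`.
    let mut calls = 0;
    let value = false.then(|| {
        calls += 1;
        expensive()
    });
    assert_eq!(value, None);
    assert_eq!(calls, 0); // the closure never ran
}
```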
diff --git a/library/core/src/borrow.rs b/library/core/src/borrow.rs
index 4a8302ee4..efc9ada38 100644
--- a/library/core/src/borrow.rs
+++ b/library/core/src/borrow.rs
@@ -154,7 +154,6 @@
/// [`String`]: ../../std/string/struct.String.html
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Borrow"]
-#[const_trait]
pub trait Borrow<Borrowed: ?Sized> {
/// Immutably borrows from an owned value.
///
@@ -185,7 +184,6 @@ pub trait Borrow<Borrowed: ?Sized> {
/// an underlying type by providing a mutable reference. See [`Borrow<T>`]
/// for more information on borrowing as another type.
#[stable(feature = "rust1", since = "1.0.0")]
-#[const_trait]
pub trait BorrowMut<Borrowed: ?Sized>: Borrow<Borrowed> {
/// Mutably borrows from an owned value.
///
@@ -207,8 +205,7 @@ pub trait BorrowMut<Borrowed: ?Sized>: Borrow<Borrowed> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T: ?Sized> const Borrow<T> for T {
+impl<T: ?Sized> Borrow<T> for T {
#[rustc_diagnostic_item = "noop_method_borrow"]
fn borrow(&self) -> &T {
self
@@ -216,32 +213,28 @@ impl<T: ?Sized> const Borrow<T> for T {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T: ?Sized> const BorrowMut<T> for T {
+impl<T: ?Sized> BorrowMut<T> for T {
fn borrow_mut(&mut self) -> &mut T {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T: ?Sized> const Borrow<T> for &T {
+impl<T: ?Sized> Borrow<T> for &T {
fn borrow(&self) -> &T {
&**self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T: ?Sized> const Borrow<T> for &mut T {
+impl<T: ?Sized> Borrow<T> for &mut T {
fn borrow(&self) -> &T {
&**self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_borrow", issue = "91522")]
-impl<T: ?Sized> const BorrowMut<T> for &mut T {
+impl<T: ?Sized> BorrowMut<T> for &mut T {
fn borrow_mut(&mut self) -> &mut T {
&mut **self
}
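These blanket impls are what let a single `Borrow<T>` bound accept owned values, `&T`, and `&mut T` alike; only their `const`ness is removed here. A small usage sketch (the function `starts_with_hash` is hypothetical):

```rust
use std::borrow::Borrow;

fn starts_with_hash<S: Borrow<str>>(s: S) -> bool {
    s.borrow().starts_with('#')
}

fn main() {
    assert!(starts_with_hash("#anchor"));          // &str via `impl Borrow<T> for &T`
    assert!(starts_with_hash(String::from("#x"))); // String's own `Borrow<str>` impl
    assert!(!starts_with_hash("plain"));
}
```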
diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs
index 33d928e23..744767aae 100644
--- a/library/core/src/cell.rs
+++ b/library/core/src/cell.rs
@@ -115,7 +115,7 @@
//! let shared_map: Rc<RefCell<_>> = Rc::new(RefCell::new(HashMap::new()));
//! // Create a new block to limit the scope of the dynamic borrow
//! {
-//! let mut map: RefMut<_> = shared_map.borrow_mut();
+//! let mut map: RefMut<'_, _> = shared_map.borrow_mut();
//! map.insert("africa", 92388);
//! map.insert("kyoto", 11837);
//! map.insert("piccadilly", 11826);
@@ -370,8 +370,7 @@ impl<T: Ord + Copy> Ord for Cell<T> {
}
#[stable(feature = "cell_from", since = "1.12.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for Cell<T> {
+impl<T> From<T> for Cell<T> {
/// Creates a new `Cell<T>` containing the given value.
fn from(t: T) -> Cell<T> {
Cell::new(t)
@@ -1318,8 +1317,7 @@ impl<T: ?Sized + Ord> Ord for RefCell<T> {
}
#[stable(feature = "cell_from", since = "1.12.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for RefCell<T> {
+impl<T> From<T> for RefCell<T> {
/// Creates a new `RefCell<T>` containing the given value.
fn from(t: T) -> RefCell<T> {
RefCell::new(t)
@@ -1437,8 +1435,8 @@ impl<'b, T: ?Sized> Ref<'b, T> {
/// use std::cell::{RefCell, Ref};
///
/// let c = RefCell::new((5, 'b'));
- /// let b1: Ref<(u32, char)> = c.borrow();
- /// let b2: Ref<u32> = Ref::map(b1, |t| &t.0);
+ /// let b1: Ref<'_, (u32, char)> = c.borrow();
+ /// let b2: Ref<'_, u32> = Ref::map(b1, |t| &t.0);
/// assert_eq!(*b2, 5)
/// ```
#[stable(feature = "cell_map", since = "1.8.0")]
@@ -1466,8 +1464,8 @@ impl<'b, T: ?Sized> Ref<'b, T> {
/// use std::cell::{RefCell, Ref};
///
/// let c = RefCell::new(vec![1, 2, 3]);
- /// let b1: Ref<Vec<u32>> = c.borrow();
- /// let b2: Result<Ref<u32>, _> = Ref::filter_map(b1, |v| v.get(1));
+ /// let b1: Ref<'_, Vec<u32>> = c.borrow();
+ /// let b2: Result<Ref<'_, u32>, _> = Ref::filter_map(b1, |v| v.get(1));
/// assert_eq!(*b2.unwrap(), 2);
/// ```
#[stable(feature = "cell_filter_map", since = "1.63.0")]
@@ -1579,8 +1577,8 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
///
/// let c = RefCell::new((5, 'b'));
/// {
- /// let b1: RefMut<(u32, char)> = c.borrow_mut();
- /// let mut b2: RefMut<u32> = RefMut::map(b1, |t| &mut t.0);
+ /// let b1: RefMut<'_, (u32, char)> = c.borrow_mut();
+ /// let mut b2: RefMut<'_, u32> = RefMut::map(b1, |t| &mut t.0);
/// assert_eq!(*b2, 5);
/// *b2 = 42;
/// }
@@ -1614,8 +1612,8 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
/// let c = RefCell::new(vec![1, 2, 3]);
///
/// {
- /// let b1: RefMut<Vec<u32>> = c.borrow_mut();
- /// let mut b2: Result<RefMut<u32>, _> = RefMut::filter_map(b1, |v| v.get_mut(1));
+ /// let b1: RefMut<'_, Vec<u32>> = c.borrow_mut();
+ /// let mut b2: Result<RefMut<'_, u32>, _> = RefMut::filter_map(b1, |v| v.get_mut(1));
///
/// if let Ok(mut b2) = b2 {
/// *b2 += 2;
@@ -2032,6 +2030,27 @@ impl<T> UnsafeCell<T> {
}
impl<T: ?Sized> UnsafeCell<T> {
+ /// Converts from `&mut T` to `&mut UnsafeCell<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![feature(unsafe_cell_from_mut)]
+ /// use std::cell::UnsafeCell;
+ ///
+ /// let mut val = 42;
+ /// let uc = UnsafeCell::from_mut(&mut val);
+ ///
+ /// *uc.get_mut() -= 1;
+ /// assert_eq!(*uc.get_mut(), 41);
+ /// ```
+ #[inline(always)]
+ #[unstable(feature = "unsafe_cell_from_mut", issue = "111645")]
+ pub const fn from_mut(value: &mut T) -> &mut UnsafeCell<T> {
+ // SAFETY: `UnsafeCell<T>` has the same memory layout as `T` due to #[repr(transparent)].
+ unsafe { &mut *(value as *mut T as *mut UnsafeCell<T>) }
+ }
+
/// Gets a mutable pointer to the wrapped value.
///
/// This can be cast to a pointer of any kind.
@@ -2102,6 +2121,8 @@ impl<T: ?Sized> UnsafeCell<T> {
///
/// let m = MaybeUninit::<UnsafeCell<i32>>::uninit();
/// unsafe { UnsafeCell::raw_get(m.as_ptr()).write(5); }
+ /// // avoid the following, which creates a reference to uninitialized data
+ /// // unsafe { UnsafeCell::get(&*m.as_ptr()).write(5); }
/// let uc = unsafe { m.assume_init() };
///
/// assert_eq!(uc.into_inner(), 5);
@@ -2126,8 +2147,7 @@ impl<T: Default> Default for UnsafeCell<T> {
}
#[stable(feature = "cell_from", since = "1.12.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for UnsafeCell<T> {
+impl<T> From<T> for UnsafeCell<T> {
/// Creates a new `UnsafeCell<T>` containing the given value.
fn from(t: T) -> UnsafeCell<T> {
UnsafeCell::new(t)
@@ -2226,8 +2246,7 @@ impl<T: Default> Default for SyncUnsafeCell<T> {
}
#[unstable(feature = "sync_unsafe_cell", issue = "95439")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for SyncUnsafeCell<T> {
+impl<T> From<T> for SyncUnsafeCell<T> {
/// Creates a new `SyncUnsafeCell<T>` containing the given value.
fn from(t: T) -> SyncUnsafeCell<T> {
SyncUnsafeCell::new(t)
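The de-constified `From<T>` impls above remain ordinary conversions on stable Rust; for example:

```rust
use std::cell::{Cell, RefCell};

fn main() {
    // `Cell::from` wraps the value just like `Cell::new`.
    let c: Cell<i32> = Cell::from(5);
    c.set(c.get() + 1);
    assert_eq!(c.get(), 6);

    // Likewise for `RefCell::from`.
    let r: RefCell<Vec<i32>> = RefCell::from(vec![1, 2]);
    r.borrow_mut().push(3);
    assert_eq!(r.borrow().len(), 3);
}
```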
diff --git a/library/core/src/cell/lazy.rs b/library/core/src/cell/lazy.rs
index 44adcfa1a..1b213f6a2 100644
--- a/library/core/src/cell/lazy.rs
+++ b/library/core/src/cell/lazy.rs
@@ -63,6 +63,34 @@ impl<T, F: FnOnce() -> T> LazyCell<T, F> {
LazyCell { state: UnsafeCell::new(State::Uninit(f)) }
}
+ /// Consumes this `LazyCell` returning the stored value.
+ ///
+ /// Returns `Ok(value)` if `Lazy` is initialized and `Err(f)` otherwise.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(lazy_cell)]
+ /// #![feature(lazy_cell_consume)]
+ ///
+ /// use std::cell::LazyCell;
+ ///
+ /// let hello = "Hello, World!".to_string();
+ ///
+ /// let lazy = LazyCell::new(|| hello.to_uppercase());
+ ///
+ /// assert_eq!(&*lazy, "HELLO, WORLD!");
+ /// assert_eq!(LazyCell::into_inner(lazy).ok(), Some("HELLO, WORLD!".to_string()));
+ /// ```
+ #[unstable(feature = "lazy_cell_consume", issue = "109736")]
+ pub fn into_inner(this: Self) -> Result<T, F> {
+ match this.state.into_inner() {
+ State::Init(data) => Ok(data),
+ State::Uninit(f) => Err(f),
+ State::Poisoned => panic!("LazyCell instance has previously been poisoned"),
+ }
+ }
+
/// Forces the evaluation of this lazy value and returns a reference to
/// the result.
///
diff --git a/library/core/src/cell/once.rs b/library/core/src/cell/once.rs
index f7cd3ec5f..5f06a7b07 100644
--- a/library/core/src/cell/once.rs
+++ b/library/core/src/cell/once.rs
@@ -284,8 +284,7 @@ impl<T: PartialEq> PartialEq for OnceCell<T> {
impl<T: Eq> Eq for OnceCell<T> {}
#[stable(feature = "once_cell", since = "1.70.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for OnceCell<T> {
+impl<T> From<T> for OnceCell<T> {
/// Creates a new `OnceCell<T>` which already contains the given `value`.
#[inline]
fn from(value: T) -> Self {
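`From<T>` for `OnceCell<T>` produces an already-initialized cell, in contrast to lazy initialization through `get_or_init`; both are stable as of 1.70:

```rust
use std::cell::OnceCell;

fn main() {
    // A cell built with `from` is already initialized, so `set` is rejected...
    let cell = OnceCell::from(92);
    assert_eq!(cell.get(), Some(&92));
    assert_eq!(cell.set(10), Err(10));

    // ...while a fresh cell is filled on first use.
    let lazy: OnceCell<String> = OnceCell::new();
    let value = lazy.get_or_init(|| "computed".to_string());
    assert_eq!(value, "computed");
}
```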
diff --git a/library/core/src/char/convert.rs b/library/core/src/char/convert.rs
index 136bbcb8b..b84e4b35b 100644
--- a/library/core/src/char/convert.rs
+++ b/library/core/src/char/convert.rs
@@ -27,8 +27,7 @@ pub(super) const unsafe fn from_u32_unchecked(i: u32) -> char {
}
#[stable(feature = "char_convert", since = "1.13.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<char> for u32 {
+impl From<char> for u32 {
/// Converts a [`char`] into a [`u32`].
///
/// # Examples
@@ -47,8 +46,7 @@ impl const From<char> for u32 {
}
#[stable(feature = "more_char_conversions", since = "1.51.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<char> for u64 {
+impl From<char> for u64 {
/// Converts a [`char`] into a [`u64`].
///
/// # Examples
@@ -69,8 +67,7 @@ impl const From<char> for u64 {
}
#[stable(feature = "more_char_conversions", since = "1.51.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<char> for u128 {
+impl From<char> for u128 {
/// Converts a [`char`] into a [`u128`].
///
/// # Examples
@@ -123,8 +120,7 @@ impl TryFrom<char> for u8 {
/// for a superset of Windows-1252 that fills the remaining blanks with corresponding
/// C0 and C1 control codes.
#[stable(feature = "char_convert", since = "1.13.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<u8> for char {
+impl From<u8> for char {
/// Converts a [`u8`] into a [`char`].
///
/// # Examples
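For reference, the widening `From<char>` impls and the Latin-1 style `From<u8>` impl touched above behave as follows on stable Rust:

```rust
fn main() {
    // The widening conversions give the Unicode scalar value...
    assert_eq!(u32::from('❤'), 0x2764);
    assert_eq!(u64::from('a'), 97);

    // ...and `From<u8>` maps bytes to U+0000..=U+00FF (Latin-1, not UTF-8).
    assert_eq!(char::from(97u8), 'a');
    assert_eq!(char::from(0xE9u8), 'é');
}
```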
diff --git a/library/core/src/char/methods.rs b/library/core/src/char/methods.rs
index 9bc97ea0b..515b8d20e 100644
--- a/library/core/src/char/methods.rs
+++ b/library/core/src/char/methods.rs
@@ -1,5 +1,6 @@
//! impl char {}
+use crate::ascii;
use crate::slice;
use crate::str::from_utf8_unchecked_mut;
use crate::unicode::printable::is_printable;
@@ -380,20 +381,7 @@ impl char {
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn escape_unicode(self) -> EscapeUnicode {
- let c = self as u32;
-
- // or-ing 1 ensures that for c==0 the code computes that one
- // digit should be printed and (which is the same) avoids the
- // (31 - 32) underflow
- let msb = 31 - (c | 1).leading_zeros();
-
- // the index of the most significant hex digit
- let ms_hex_digit = msb / 4;
- EscapeUnicode {
- c: self,
- state: EscapeUnicodeState::Backslash,
- hex_digit_idx: ms_hex_digit as usize,
- }
+ EscapeUnicode::new(self)
}
/// An extended version of `escape_debug` that optionally permits escaping
@@ -403,21 +391,20 @@ impl char {
/// characters, and double quotes in strings.
#[inline]
pub(crate) fn escape_debug_ext(self, args: EscapeDebugExtArgs) -> EscapeDebug {
- let init_state = match self {
- '\0' => EscapeDefaultState::Backslash('0'),
- '\t' => EscapeDefaultState::Backslash('t'),
- '\r' => EscapeDefaultState::Backslash('r'),
- '\n' => EscapeDefaultState::Backslash('n'),
- '\\' => EscapeDefaultState::Backslash(self),
- '"' if args.escape_double_quote => EscapeDefaultState::Backslash(self),
- '\'' if args.escape_single_quote => EscapeDefaultState::Backslash(self),
+ match self {
+ '\0' => EscapeDebug::backslash(ascii::Char::Digit0),
+ '\t' => EscapeDebug::backslash(ascii::Char::SmallT),
+ '\r' => EscapeDebug::backslash(ascii::Char::SmallR),
+ '\n' => EscapeDebug::backslash(ascii::Char::SmallN),
+ '\\' => EscapeDebug::backslash(ascii::Char::ReverseSolidus),
+ '\"' if args.escape_double_quote => EscapeDebug::backslash(ascii::Char::QuotationMark),
+ '\'' if args.escape_single_quote => EscapeDebug::backslash(ascii::Char::Apostrophe),
_ if args.escape_grapheme_extended && self.is_grapheme_extended() => {
- EscapeDefaultState::Unicode(self.escape_unicode())
+ EscapeDebug::from_unicode(self.escape_unicode())
}
- _ if is_printable(self) => EscapeDefaultState::Char(self),
- _ => EscapeDefaultState::Unicode(self.escape_unicode()),
- };
- EscapeDebug(EscapeDefault { state: init_state })
+ _ if is_printable(self) => EscapeDebug::printable(self),
+ _ => EscapeDebug::from_unicode(self.escape_unicode()),
+ }
}
/// Returns an iterator that yields the literal escape code of a character
@@ -515,15 +502,14 @@ impl char {
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn escape_default(self) -> EscapeDefault {
- let init_state = match self {
- '\t' => EscapeDefaultState::Backslash('t'),
- '\r' => EscapeDefaultState::Backslash('r'),
- '\n' => EscapeDefaultState::Backslash('n'),
- '\\' | '\'' | '"' => EscapeDefaultState::Backslash(self),
- '\x20'..='\x7e' => EscapeDefaultState::Char(self),
- _ => EscapeDefaultState::Unicode(self.escape_unicode()),
- };
- EscapeDefault { state: init_state }
+ match self {
+ '\t' => EscapeDefault::backslash(ascii::Char::SmallT),
+ '\r' => EscapeDefault::backslash(ascii::Char::SmallR),
+ '\n' => EscapeDefault::backslash(ascii::Char::SmallN),
+ '\\' | '\'' | '"' => EscapeDefault::backslash(self.as_ascii().unwrap()),
+ '\x20'..='\x7e' => EscapeDefault::printable(self.as_ascii().unwrap()),
+ _ => EscapeDefault::from_unicode(self.escape_unicode()),
+ }
}
/// Returns the number of bytes this `char` would need if encoded in UTF-8.
@@ -1116,6 +1102,24 @@ impl char {
*self as u32 <= 0x7F
}
+ /// Returns `Some` if the value is within the ASCII range,
+ /// or `None` if it's not.
+ ///
+ /// This is preferred to [`Self::is_ascii`] when you're passing the value
+ /// along to something else that can take [`ascii::Char`] rather than
+ /// needing to check again for itself whether the value is in ASCII.
+ #[must_use]
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_ascii(&self) -> Option<ascii::Char> {
+ if self.is_ascii() {
+ // SAFETY: Just checked that this is ASCII.
+ Some(unsafe { ascii::Char::from_u8_unchecked(*self as u8) })
+ } else {
+ None
+ }
+ }
+
/// Makes a copy of the value in its ASCII upper case equivalent.
///
/// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
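As with the byte-level version in `ascii.rs`, `char::escape_default` keeps producing the same escape sequences; only the backing iterator changed. A stable-Rust spot check:

```rust
fn main() {
    assert_eq!('\n'.escape_default().to_string(), "\\n");
    assert_eq!('\''.escape_default().to_string(), "\\'");
    // Non-ASCII printable characters fall through to the `\u{...}` form.
    assert_eq!('❤'.escape_default().to_string(), "\\u{2764}");
    assert_eq!('a'.escape_default().count(), 1);
}
```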
diff --git a/library/core/src/char/mod.rs b/library/core/src/char/mod.rs
index 8ec78e887..5c4291287 100644
--- a/library/core/src/char/mod.rs
+++ b/library/core/src/char/mod.rs
@@ -38,9 +38,12 @@ pub use self::methods::encode_utf16_raw;
#[unstable(feature = "char_internals", reason = "exposed only for libstd", issue = "none")]
pub use self::methods::encode_utf8_raw;
+use crate::ascii;
use crate::error::Error;
+use crate::escape;
use crate::fmt::{self, Write};
use crate::iter::FusedIterator;
+use crate::num::NonZeroUsize;
pub(crate) use self::methods::EscapeDebugExtArgs;
@@ -146,86 +149,44 @@ pub const fn from_digit(num: u32, radix: u32) -> Option<char> {
/// [`escape_unicode`]: char::escape_unicode
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
-pub struct EscapeUnicode {
- c: char,
- state: EscapeUnicodeState,
-
- // The index of the next hex digit to be printed (0 if none),
- // i.e., the number of remaining hex digits to be printed;
- // increasing from the least significant digit: 0x543210
- hex_digit_idx: usize,
-}
+pub struct EscapeUnicode(escape::EscapeIterInner<10>);
-// The enum values are ordered so that their representation is the
-// same as the remaining length (besides the hexadecimal digits). This
-// likely makes `len()` a single load from memory) and inline-worth.
-#[derive(Clone, Debug)]
-enum EscapeUnicodeState {
- Done,
- RightBrace,
- Value,
- LeftBrace,
- Type,
- Backslash,
+impl EscapeUnicode {
+ fn new(chr: char) -> Self {
+ let mut data = [ascii::Char::Null; 10];
+ let range = escape::escape_unicode_into(&mut data, chr);
+ Self(escape::EscapeIterInner::new(data, range))
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for EscapeUnicode {
type Item = char;
+ #[inline]
fn next(&mut self) -> Option<char> {
- match self.state {
- EscapeUnicodeState::Backslash => {
- self.state = EscapeUnicodeState::Type;
- Some('\\')
- }
- EscapeUnicodeState::Type => {
- self.state = EscapeUnicodeState::LeftBrace;
- Some('u')
- }
- EscapeUnicodeState::LeftBrace => {
- self.state = EscapeUnicodeState::Value;
- Some('{')
- }
- EscapeUnicodeState::Value => {
- let hex_digit = ((self.c as u32) >> (self.hex_digit_idx * 4)) & 0xf;
- let c = char::from_digit(hex_digit, 16).unwrap();
- if self.hex_digit_idx == 0 {
- self.state = EscapeUnicodeState::RightBrace;
- } else {
- self.hex_digit_idx -= 1;
- }
- Some(c)
- }
- EscapeUnicodeState::RightBrace => {
- self.state = EscapeUnicodeState::Done;
- Some('}')
- }
- EscapeUnicodeState::Done => None,
- }
+ self.0.next().map(char::from)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- let n = self.len();
+ let n = self.0.len();
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
- self.len()
+ self.0.len()
}
- fn last(self) -> Option<char> {
- match self.state {
- EscapeUnicodeState::Done => None,
+ #[inline]
+ fn last(mut self) -> Option<char> {
+ self.0.next_back().map(char::from)
+ }
- EscapeUnicodeState::RightBrace
- | EscapeUnicodeState::Value
- | EscapeUnicodeState::LeftBrace
- | EscapeUnicodeState::Type
- | EscapeUnicodeState::Backslash => Some('}'),
- }
+ #[inline]
+ fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
+ self.0.advance_by(n)
}
}
@@ -233,16 +194,7 @@ impl Iterator for EscapeUnicode {
impl ExactSizeIterator for EscapeUnicode {
#[inline]
fn len(&self) -> usize {
- // The match is a single memory access with no branching
- self.hex_digit_idx
- + match self.state {
- EscapeUnicodeState::Done => 0,
- EscapeUnicodeState::RightBrace => 1,
- EscapeUnicodeState::Value => 2,
- EscapeUnicodeState::LeftBrace => 3,
- EscapeUnicodeState::Type => 4,
- EscapeUnicodeState::Backslash => 5,
- }
+ self.0.len()
}
}
@@ -252,10 +204,7 @@ impl FusedIterator for EscapeUnicode {}
#[stable(feature = "char_struct_display", since = "1.16.0")]
impl fmt::Display for EscapeUnicode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- for c in self.clone() {
- f.write_char(c)?;
- }
- Ok(())
+ f.write_str(self.0.as_str())
}
}
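With the buffer-backed representation, `EscapeUnicode` keeps its exact-size and `Display` behaviour; an illustrative check (not part of the patch):

    fn main() {
        let mut it = '∂'.escape_unicode(); // U+2202
        assert_eq!(it.to_string(), "\\u{2202}"); // Display now writes the buffered str directly
        assert_eq!(it.len(), 8);
        assert_eq!(it.next(), Some('\\'));
        assert_eq!(it.len(), 7);
        assert_eq!(it.last(), Some('}')); // now implemented via next_back
    }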
@@ -267,90 +216,60 @@ impl fmt::Display for EscapeUnicode {
/// [`escape_default`]: char::escape_default
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
-pub struct EscapeDefault {
- state: EscapeDefaultState,
-}
+pub struct EscapeDefault(escape::EscapeIterInner<10>);
-#[derive(Clone, Debug)]
-enum EscapeDefaultState {
- Done,
- Char(char),
- Backslash(char),
- Unicode(EscapeUnicode),
+impl EscapeDefault {
+ fn printable(chr: ascii::Char) -> Self {
+ let data = [chr];
+ Self(escape::EscapeIterInner::from_array(data))
+ }
+
+ fn backslash(chr: ascii::Char) -> Self {
+ let data = [ascii::Char::ReverseSolidus, chr];
+ Self(escape::EscapeIterInner::from_array(data))
+ }
+
+ fn from_unicode(esc: EscapeUnicode) -> Self {
+ Self(esc.0)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Iterator for EscapeDefault {
type Item = char;
+ #[inline]
fn next(&mut self) -> Option<char> {
- match self.state {
- EscapeDefaultState::Backslash(c) => {
- self.state = EscapeDefaultState::Char(c);
- Some('\\')
- }
- EscapeDefaultState::Char(c) => {
- self.state = EscapeDefaultState::Done;
- Some(c)
- }
- EscapeDefaultState::Done => None,
- EscapeDefaultState::Unicode(ref mut iter) => iter.next(),
- }
+ self.0.next().map(char::from)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- let n = self.len();
+ let n = self.0.len();
(n, Some(n))
}
#[inline]
fn count(self) -> usize {
- self.len()
+ self.0.len()
}
- fn nth(&mut self, n: usize) -> Option<char> {
- match self.state {
- EscapeDefaultState::Backslash(c) if n == 0 => {
- self.state = EscapeDefaultState::Char(c);
- Some('\\')
- }
- EscapeDefaultState::Backslash(c) if n == 1 => {
- self.state = EscapeDefaultState::Done;
- Some(c)
- }
- EscapeDefaultState::Backslash(_) => {
- self.state = EscapeDefaultState::Done;
- None
- }
- EscapeDefaultState::Char(c) => {
- self.state = EscapeDefaultState::Done;
-
- if n == 0 { Some(c) } else { None }
- }
- EscapeDefaultState::Done => None,
- EscapeDefaultState::Unicode(ref mut i) => i.nth(n),
- }
+ #[inline]
+ fn last(mut self) -> Option<char> {
+ self.0.next_back().map(char::from)
}
- fn last(self) -> Option<char> {
- match self.state {
- EscapeDefaultState::Unicode(iter) => iter.last(),
- EscapeDefaultState::Done => None,
- EscapeDefaultState::Backslash(c) | EscapeDefaultState::Char(c) => Some(c),
- }
+ #[inline]
+ fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
+ self.0.advance_by(n)
}
}
#[stable(feature = "exact_size_escape", since = "1.11.0")]
impl ExactSizeIterator for EscapeDefault {
+ #[inline]
fn len(&self) -> usize {
- match self.state {
- EscapeDefaultState::Done => 0,
- EscapeDefaultState::Char(_) => 1,
- EscapeDefaultState::Backslash(_) => 2,
- EscapeDefaultState::Unicode(ref iter) => iter.len(),
- }
+ self.0.len()
}
}
@@ -360,10 +279,7 @@ impl FusedIterator for EscapeDefault {}
#[stable(feature = "char_struct_display", since = "1.16.0")]
impl fmt::Display for EscapeDefault {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- for c in self.clone() {
- f.write_char(c)?;
- }
- Ok(())
+ f.write_str(self.0.as_str())
}
}
@@ -375,21 +291,74 @@ impl fmt::Display for EscapeDefault {
/// [`escape_debug`]: char::escape_debug
#[stable(feature = "char_escape_debug", since = "1.20.0")]
#[derive(Clone, Debug)]
-pub struct EscapeDebug(EscapeDefault);
+pub struct EscapeDebug(EscapeDebugInner);
+
+#[derive(Clone, Debug)]
+// Note: It’s possible to manually encode the EscapeDebugInner inside
+// EscapeIterInner (e.g. with alive=254..255 indicating that data[0..4] holds
+// a char), which would likely result in more optimised code. For now we use
+// the option that is easier to implement.
+enum EscapeDebugInner {
+ Bytes(escape::EscapeIterInner<10>),
+ Char(char),
+}
+
+impl EscapeDebug {
+ fn printable(chr: char) -> Self {
+ Self(EscapeDebugInner::Char(chr))
+ }
+
+ fn backslash(chr: ascii::Char) -> Self {
+ let data = [ascii::Char::ReverseSolidus, chr];
+ let iter = escape::EscapeIterInner::from_array(data);
+ Self(EscapeDebugInner::Bytes(iter))
+ }
+
+ fn from_unicode(esc: EscapeUnicode) -> Self {
+ Self(EscapeDebugInner::Bytes(esc.0))
+ }
+
+ fn clear(&mut self) {
+ let bytes = escape::EscapeIterInner::from_array([]);
+ self.0 = EscapeDebugInner::Bytes(bytes);
+ }
+}
#[stable(feature = "char_escape_debug", since = "1.20.0")]
impl Iterator for EscapeDebug {
type Item = char;
+
+ #[inline]
fn next(&mut self) -> Option<char> {
- self.0.next()
+ match self.0 {
+ EscapeDebugInner::Bytes(ref mut bytes) => bytes.next().map(char::from),
+ EscapeDebugInner::Char(chr) => {
+ self.clear();
+ Some(chr)
+ }
+ }
}
+
fn size_hint(&self) -> (usize, Option<usize>) {
- self.0.size_hint()
+ let n = self.len();
+ (n, Some(n))
+ }
+
+ #[inline]
+ fn count(self) -> usize {
+ self.len()
}
}
#[stable(feature = "char_escape_debug", since = "1.20.0")]
-impl ExactSizeIterator for EscapeDebug {}
+impl ExactSizeIterator for EscapeDebug {
+ fn len(&self) -> usize {
+ match &self.0 {
+ EscapeDebugInner::Bytes(bytes) => bytes.len(),
+ EscapeDebugInner::Char(_) => 1,
+ }
+ }
+}
#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EscapeDebug {}
@@ -397,7 +366,10 @@ impl FusedIterator for EscapeDebug {}
#[stable(feature = "char_escape_debug", since = "1.20.0")]
impl fmt::Display for EscapeDebug {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(&self.0, f)
+ match &self.0 {
+ EscapeDebugInner::Bytes(bytes) => f.write_str(bytes.as_str()),
+ EscapeDebugInner::Char(chr) => f.write_char(*chr),
+ }
}
}
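The `Char` variant exists because a printable character may be outside ASCII and therefore cannot be stored in the `[ascii::Char; 10]` buffer; escapes, by contrast, are always ASCII and reuse the `Bytes` path. An illustrative check (not part of the patch):

    fn main() {
        // Printable characters are yielded as a single item, ASCII or not.
        let mut heart = '❤'.escape_debug();
        assert_eq!(heart.len(), 1);
        assert_eq!(heart.next(), Some('❤'));
        assert_eq!(heart.next(), None);

        // Backslash and unicode escapes go through the byte buffer.
        assert_eq!('\n'.escape_debug().len(), 2);
        assert_eq!('\u{301}'.escape_debug().to_string(), "\\u{301}");
    }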
diff --git a/library/core/src/clone.rs b/library/core/src/clone.rs
index 398437d9a..a6d6230d3 100644
--- a/library/core/src/clone.rs
+++ b/library/core/src/clone.rs
@@ -36,8 +36,6 @@
#![stable(feature = "rust1", since = "1.0.0")]
-use crate::marker::Destruct;
-
/// A common trait for the ability to explicitly duplicate an object.
///
/// Differs from [`Copy`] in that [`Copy`] is implicit and an inexpensive bit-wise copy, while
@@ -106,7 +104,6 @@ use crate::marker::Destruct;
#[lang = "clone"]
#[rustc_diagnostic_item = "Clone"]
#[rustc_trivial_field_reads]
-#[const_trait]
pub trait Clone: Sized {
/// Returns a copy of the value.
///
@@ -129,10 +126,7 @@ pub trait Clone: Sized {
/// allocations.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- fn clone_from(&mut self, source: &Self)
- where
- Self: ~const Destruct,
- {
+ fn clone_from(&mut self, source: &Self) {
*self = source.clone()
}
}
@@ -182,8 +176,7 @@ mod impls {
($($t:ty)*) => {
$(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_clone", issue = "91805")]
- impl const Clone for $t {
+ impl Clone for $t {
#[inline(always)]
fn clone(&self) -> Self {
*self
@@ -201,8 +194,7 @@ mod impls {
}
#[unstable(feature = "never_type", issue = "35121")]
- #[rustc_const_unstable(feature = "const_clone", issue = "91805")]
- impl const Clone for ! {
+ impl Clone for ! {
#[inline]
fn clone(&self) -> Self {
*self
@@ -210,8 +202,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_clone", issue = "91805")]
- impl<T: ?Sized> const Clone for *const T {
+ impl<T: ?Sized> Clone for *const T {
#[inline(always)]
fn clone(&self) -> Self {
*self
@@ -219,8 +210,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_clone", issue = "91805")]
- impl<T: ?Sized> const Clone for *mut T {
+ impl<T: ?Sized> Clone for *mut T {
#[inline(always)]
fn clone(&self) -> Self {
*self
@@ -229,8 +219,7 @@ mod impls {
/// Shared references can be cloned, but mutable references *cannot*!
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_clone", issue = "91805")]
- impl<T: ?Sized> const Clone for &T {
+ impl<T: ?Sized> Clone for &T {
#[inline(always)]
#[rustc_diagnostic_item = "noop_method_clone"]
fn clone(&self) -> Self {
diff --git a/library/core/src/cmp.rs b/library/core/src/cmp.rs
index 55331475a..faf48ae57 100644
--- a/library/core/src/cmp.rs
+++ b/library/core/src/cmp.rs
@@ -25,8 +25,6 @@
mod bytewise;
pub(crate) use bytewise::BytewiseEq;
-use crate::marker::Destruct;
-
use self::Ordering::*;
/// Trait for equality comparisons.
@@ -212,7 +210,6 @@ use self::Ordering::*;
label = "no implementation for `{Self} == {Rhs}`",
append_const_msg
)]
-#[const_trait]
#[rustc_diagnostic_item = "PartialEq"]
pub trait PartialEq<Rhs: ?Sized = Self> {
/// This method tests for `self` and `other` values to be equal, and is used
@@ -324,17 +321,13 @@ pub struct AssertParamIsEq<T: Eq + ?Sized> {
/// ```
/// use std::cmp::Ordering;
///
-/// let result = 1.cmp(&2);
-/// assert_eq!(Ordering::Less, result);
+/// assert_eq!(1.cmp(&2), Ordering::Less);
///
-/// let result = 1.cmp(&1);
-/// assert_eq!(Ordering::Equal, result);
+/// assert_eq!(1.cmp(&1), Ordering::Equal);
///
-/// let result = 2.cmp(&1);
-/// assert_eq!(Ordering::Greater, result);
+/// assert_eq!(2.cmp(&1), Ordering::Greater);
/// ```
-#[derive(Clone, Copy, Eq, Debug, Hash)]
-#[derive_const(PartialOrd, Ord, PartialEq)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
#[repr(i8)]
pub enum Ordering {
@@ -604,8 +597,7 @@ impl Ordering {
pub struct Reverse<T>(#[stable(feature = "reverse_cmp_key", since = "1.19.0")] pub T);
#[stable(feature = "reverse_cmp_key", since = "1.19.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
-impl<T: ~const PartialOrd> const PartialOrd for Reverse<T> {
+impl<T: PartialOrd> PartialOrd for Reverse<T> {
#[inline]
fn partial_cmp(&self, other: &Reverse<T>) -> Option<Ordering> {
other.0.partial_cmp(&self.0)
@@ -763,7 +755,6 @@ impl<T: Clone> Clone for Reverse<T> {
#[doc(alias = ">=")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Ord"]
-#[const_trait]
pub trait Ord: Eq + PartialOrd<Self> {
/// This method returns an [`Ordering`] between `self` and `other`.
///
@@ -790,8 +781,8 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// # Examples
///
/// ```
- /// assert_eq!(2, 1.max(2));
- /// assert_eq!(2, 2.max(2));
+ /// assert_eq!(1.max(2), 2);
+ /// assert_eq!(2.max(2), 2);
/// ```
#[stable(feature = "ord_max_min", since = "1.21.0")]
#[inline]
@@ -799,7 +790,6 @@ pub trait Ord: Eq + PartialOrd<Self> {
fn max(self, other: Self) -> Self
where
Self: Sized,
- Self: ~const Destruct,
{
max_by(self, other, Ord::cmp)
}
@@ -811,8 +801,8 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// # Examples
///
/// ```
- /// assert_eq!(1, 1.min(2));
- /// assert_eq!(2, 2.min(2));
+ /// assert_eq!(1.min(2), 1);
+ /// assert_eq!(2.min(2), 2);
/// ```
#[stable(feature = "ord_max_min", since = "1.21.0")]
#[inline]
@@ -820,7 +810,6 @@ pub trait Ord: Eq + PartialOrd<Self> {
fn min(self, other: Self) -> Self
where
Self: Sized,
- Self: ~const Destruct,
{
min_by(self, other, Ord::cmp)
}
@@ -837,17 +826,16 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// # Examples
///
/// ```
- /// assert!((-3).clamp(-2, 1) == -2);
- /// assert!(0.clamp(-2, 1) == 0);
- /// assert!(2.clamp(-2, 1) == 1);
+ /// assert_eq!((-3).clamp(-2, 1), -2);
+ /// assert_eq!(0.clamp(-2, 1), 0);
+ /// assert_eq!(2.clamp(-2, 1), 1);
/// ```
#[must_use]
#[stable(feature = "clamp", since = "1.50.0")]
fn clamp(self, min: Self, max: Self) -> Self
where
Self: Sized,
- Self: ~const Destruct,
- Self: ~const PartialOrd,
+ Self: PartialOrd,
{
assert!(min <= max);
if self < min {
@@ -1035,7 +1023,6 @@ pub macro Ord($item:item) {
label = "no implementation for `{Self} < {Rhs}` and `{Self} > {Rhs}`",
append_const_msg
)]
-#[const_trait]
#[rustc_diagnostic_item = "PartialOrd"]
pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
/// This method returns an ordering between `self` and `other` values if one exists.
@@ -1070,11 +1057,9 @@ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
/// # Examples
///
/// ```
- /// let result = 1.0 < 2.0;
- /// assert_eq!(result, true);
- ///
- /// let result = 2.0 < 1.0;
- /// assert_eq!(result, false);
+ /// assert_eq!(1.0 < 1.0, false);
+ /// assert_eq!(1.0 < 2.0, true);
+ /// assert_eq!(2.0 < 1.0, false);
/// ```
#[inline]
#[must_use]
@@ -1089,11 +1074,9 @@ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
/// # Examples
///
/// ```
- /// let result = 1.0 <= 2.0;
- /// assert_eq!(result, true);
- ///
- /// let result = 2.0 <= 2.0;
- /// assert_eq!(result, true);
+ /// assert_eq!(1.0 <= 1.0, true);
+ /// assert_eq!(1.0 <= 2.0, true);
+ /// assert_eq!(2.0 <= 1.0, false);
/// ```
#[inline]
#[must_use]
@@ -1107,11 +1090,9 @@ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
/// # Examples
///
/// ```
- /// let result = 1.0 > 2.0;
- /// assert_eq!(result, false);
- ///
- /// let result = 2.0 > 2.0;
- /// assert_eq!(result, false);
+ /// assert_eq!(1.0 > 1.0, false);
+ /// assert_eq!(1.0 > 2.0, false);
+ /// assert_eq!(2.0 > 1.0, true);
/// ```
#[inline]
#[must_use]
@@ -1126,11 +1107,9 @@ pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
/// # Examples
///
/// ```
- /// let result = 2.0 >= 1.0;
- /// assert_eq!(result, true);
- ///
- /// let result = 2.0 >= 2.0;
- /// assert_eq!(result, true);
+ /// assert_eq!(1.0 >= 1.0, true);
+ /// assert_eq!(1.0 >= 2.0, false);
+ /// assert_eq!(2.0 >= 1.0, true);
/// ```
#[inline]
#[must_use]
@@ -1160,15 +1139,14 @@ pub macro PartialOrd($item:item) {
/// ```
/// use std::cmp;
///
-/// assert_eq!(1, cmp::min(1, 2));
-/// assert_eq!(2, cmp::min(2, 2));
+/// assert_eq!(cmp::min(1, 2), 1);
+/// assert_eq!(cmp::min(2, 2), 2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
#[cfg_attr(not(test), rustc_diagnostic_item = "cmp_min")]
-pub const fn min<T: ~const Ord + ~const Destruct>(v1: T, v2: T) -> T {
+pub fn min<T: Ord>(v1: T, v2: T) -> T {
v1.min(v2)
}
@@ -1181,18 +1159,16 @@ pub const fn min<T: ~const Ord + ~const Destruct>(v1: T, v2: T) -> T {
/// ```
/// use std::cmp;
///
-/// assert_eq!(cmp::min_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 1);
-/// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
+/// let result = cmp::min_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs()));
+/// assert_eq!(result, 1);
+///
+/// let result = cmp::min_by(-2, 3, |x: &i32, y: &i32| x.abs().cmp(&y.abs()));
+/// assert_eq!(result, -2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "cmp_min_max_by", since = "1.53.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
-pub const fn min_by<T, F: ~const FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T
-where
- T: ~const Destruct,
- F: ~const Destruct,
-{
+pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
match compare(&v1, &v2) {
Ordering::Less | Ordering::Equal => v1,
Ordering::Greater => v2,
@@ -1208,20 +1184,17 @@ where
/// ```
/// use std::cmp;
///
-/// assert_eq!(cmp::min_by_key(-2, 1, |x: &i32| x.abs()), 1);
-/// assert_eq!(cmp::min_by_key(-2, 2, |x: &i32| x.abs()), -2);
+/// let result = cmp::min_by_key(-2, 1, |x: &i32| x.abs());
+/// assert_eq!(result, 1);
+///
+/// let result = cmp::min_by_key(-2, 2, |x: &i32| x.abs());
+/// assert_eq!(result, -2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "cmp_min_max_by", since = "1.53.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
-pub const fn min_by_key<T, F: ~const FnMut(&T) -> K, K: ~const Ord>(v1: T, v2: T, mut f: F) -> T
-where
- T: ~const Destruct,
- F: ~const Destruct,
- K: ~const Destruct,
-{
- min_by(v1, v2, const |v1, v2| f(v1).cmp(&f(v2)))
+pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
+ min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
}
/// Compares and returns the maximum of two values.
@@ -1235,15 +1208,14 @@ where
/// ```
/// use std::cmp;
///
-/// assert_eq!(2, cmp::max(1, 2));
-/// assert_eq!(2, cmp::max(2, 2));
+/// assert_eq!(cmp::max(1, 2), 2);
+/// assert_eq!(cmp::max(2, 2), 2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
#[cfg_attr(not(test), rustc_diagnostic_item = "cmp_max")]
-pub const fn max<T: ~const Ord + ~const Destruct>(v1: T, v2: T) -> T {
+pub fn max<T: Ord>(v1: T, v2: T) -> T {
v1.max(v2)
}
@@ -1256,18 +1228,16 @@ pub const fn max<T: ~const Ord + ~const Destruct>(v1: T, v2: T) -> T {
/// ```
/// use std::cmp;
///
-/// assert_eq!(cmp::max_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
-/// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2);
+/// let result = cmp::max_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs()));
+/// assert_eq!(result, -2);
+///
+/// let result = cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs()));
+/// assert_eq!(result, 2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "cmp_min_max_by", since = "1.53.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
-pub const fn max_by<T, F: ~const FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T
-where
- T: ~const Destruct,
- F: ~const Destruct,
-{
+pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
match compare(&v1, &v2) {
Ordering::Less | Ordering::Equal => v2,
Ordering::Greater => v1,
@@ -1283,20 +1253,17 @@ where
/// ```
/// use std::cmp;
///
-/// assert_eq!(cmp::max_by_key(-2, 1, |x: &i32| x.abs()), -2);
-/// assert_eq!(cmp::max_by_key(-2, 2, |x: &i32| x.abs()), 2);
+/// let result = cmp::max_by_key(-2, 1, |x: &i32| x.abs());
+/// assert_eq!(result, -2);
+///
+/// let result = cmp::max_by_key(-2, 2, |x: &i32| x.abs());
+/// assert_eq!(result, 2);
/// ```
#[inline]
#[must_use]
#[stable(feature = "cmp_min_max_by", since = "1.53.0")]
-#[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
-pub const fn max_by_key<T, F: ~const FnMut(&T) -> K, K: ~const Ord>(v1: T, v2: T, mut f: F) -> T
-where
- T: ~const Destruct,
- F: ~const Destruct,
- K: ~const Destruct,
-{
- max_by(v1, v2, const |v1, v2| f(v1).cmp(&f(v2)))
+pub fn max_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
+ max_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
}
// Implementation of PartialEq, Eq, PartialOrd and Ord for primitive types
@@ -1307,8 +1274,7 @@ mod impls {
macro_rules! partial_eq_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialEq for $t {
+ impl PartialEq for $t {
#[inline]
fn eq(&self, other: &$t) -> bool { (*self) == (*other) }
#[inline]
@@ -1318,8 +1284,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialEq for () {
+ impl PartialEq for () {
#[inline]
fn eq(&self, _other: &()) -> bool {
true
@@ -1346,8 +1311,7 @@ mod impls {
macro_rules! partial_ord_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialOrd for $t {
+ impl PartialOrd for $t {
#[inline]
fn partial_cmp(&self, other: &$t) -> Option<Ordering> {
match (*self <= *other, *self >= *other) {
@@ -1357,21 +1321,20 @@ mod impls {
(true, true) => Some(Equal),
}
}
- #[inline]
+ #[inline(always)]
fn lt(&self, other: &$t) -> bool { (*self) < (*other) }
- #[inline]
+ #[inline(always)]
fn le(&self, other: &$t) -> bool { (*self) <= (*other) }
- #[inline]
+ #[inline(always)]
fn ge(&self, other: &$t) -> bool { (*self) >= (*other) }
- #[inline]
+ #[inline(always)]
fn gt(&self, other: &$t) -> bool { (*self) > (*other) }
}
)*)
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialOrd for () {
+ impl PartialOrd for () {
#[inline]
fn partial_cmp(&self, _: &()) -> Option<Ordering> {
Some(Equal)
@@ -1379,8 +1342,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialOrd for bool {
+ impl PartialOrd for bool {
#[inline]
fn partial_cmp(&self, other: &bool) -> Option<Ordering> {
Some(self.cmp(other))
@@ -1392,25 +1354,23 @@ mod impls {
macro_rules! ord_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialOrd for $t {
+ impl PartialOrd for $t {
#[inline]
fn partial_cmp(&self, other: &$t) -> Option<Ordering> {
Some(self.cmp(other))
}
- #[inline]
+ #[inline(always)]
fn lt(&self, other: &$t) -> bool { (*self) < (*other) }
- #[inline]
+ #[inline(always)]
fn le(&self, other: &$t) -> bool { (*self) <= (*other) }
- #[inline]
+ #[inline(always)]
fn ge(&self, other: &$t) -> bool { (*self) >= (*other) }
- #[inline]
+ #[inline(always)]
fn gt(&self, other: &$t) -> bool { (*self) > (*other) }
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const Ord for $t {
+ impl Ord for $t {
#[inline]
fn cmp(&self, other: &$t) -> Ordering {
// The order here is important to generate more optimal assembly.
@@ -1424,8 +1384,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const Ord for () {
+ impl Ord for () {
#[inline]
fn cmp(&self, _other: &()) -> Ordering {
Equal
@@ -1433,8 +1392,7 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const Ord for bool {
+ impl Ord for bool {
#[inline]
fn cmp(&self, other: &bool) -> Ordering {
// Casting to i8's and converting the difference to an Ordering generates
@@ -1453,8 +1411,8 @@ mod impls {
ord_impl! { char usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
#[unstable(feature = "never_type", issue = "35121")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialEq for ! {
+ impl PartialEq for ! {
+ #[inline]
fn eq(&self, _: &!) -> bool {
*self
}
@@ -1464,16 +1422,16 @@ mod impls {
impl Eq for ! {}
#[unstable(feature = "never_type", issue = "35121")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const PartialOrd for ! {
+ impl PartialOrd for ! {
+ #[inline]
fn partial_cmp(&self, _: &!) -> Option<Ordering> {
*self
}
}
#[unstable(feature = "never_type", issue = "35121")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl const Ord for ! {
+ impl Ord for ! {
+ #[inline]
fn cmp(&self, _: &!) -> Ordering {
*self
}
@@ -1482,10 +1440,9 @@ mod impls {
// & pointers
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl<A: ?Sized, B: ?Sized> const PartialEq<&B> for &A
+ impl<A: ?Sized, B: ?Sized> PartialEq<&B> for &A
where
- A: ~const PartialEq<B>,
+ A: PartialEq<B>,
{
#[inline]
fn eq(&self, other: &&B) -> bool {
@@ -1497,10 +1454,9 @@ mod impls {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl<A: ?Sized, B: ?Sized> const PartialOrd<&B> for &A
+ impl<A: ?Sized, B: ?Sized> PartialOrd<&B> for &A
where
- A: ~const PartialOrd<B>,
+ A: PartialOrd<B>,
{
#[inline]
fn partial_cmp(&self, other: &&B) -> Option<Ordering> {
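One behaviour worth keeping in mind when reading the `min_by`/`max_by` bodies above: `Equal` is grouped with `Less`, so on ties `min_by` returns its first argument and `max_by` its second. A small check (not part of the patch):

    use std::cmp;

    fn main() {
        let a = (0, "first");
        let b = (0, "second");
        assert_eq!(cmp::min_by(a, b, |x, y| x.0.cmp(&y.0)), (0, "first"));
        assert_eq!(cmp::max_by(a, b, |x, y| x.0.cmp(&y.0)), (0, "second"));
    }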
diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs
index 5888e2960..38a6d1ccd 100644
--- a/library/core/src/convert/mod.rs
+++ b/library/core/src/convert/mod.rs
@@ -214,7 +214,6 @@ pub const fn identity<T>(x: T) -> T {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "AsRef")]
-#[const_trait]
pub trait AsRef<T: ?Sized> {
/// Converts this type into a shared reference of the (usually inferred) input type.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -366,7 +365,6 @@ pub trait AsRef<T: ?Sized> {
/// `&mut Vec<u8>`, for example, is the better choice (callers need to pass the correct type then).
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "AsMut")]
-#[const_trait]
pub trait AsMut<T: ?Sized> {
/// Converts this type into a mutable reference of the (usually inferred) input type.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -443,7 +441,6 @@ pub trait AsMut<T: ?Sized> {
/// [`Vec`]: ../../std/vec/struct.Vec.html
#[rustc_diagnostic_item = "Into"]
#[stable(feature = "rust1", since = "1.0.0")]
-#[const_trait]
pub trait Into<T>: Sized {
/// Converts this type into the (usually inferred) input type.
#[must_use]
@@ -539,7 +536,6 @@ pub trait Into<T>: Sized {
all(_Self = "&str", T = "std::string::String"),
note = "to coerce a `{T}` into a `{Self}`, use `&*` as a prefix",
))]
-#[const_trait]
pub trait From<T>: Sized {
/// Converts to this type from the input type.
#[rustc_diagnostic_item = "from_fn"]
@@ -564,7 +560,6 @@ pub trait From<T>: Sized {
/// [`Into`], see there for details.
#[rustc_diagnostic_item = "TryInto"]
#[stable(feature = "try_from", since = "1.34.0")]
-#[const_trait]
pub trait TryInto<T>: Sized {
/// The type returned in the event of a conversion error.
#[stable(feature = "try_from", since = "1.34.0")]
@@ -641,7 +636,6 @@ pub trait TryInto<T>: Sized {
/// [`try_from`]: TryFrom::try_from
#[rustc_diagnostic_item = "TryFrom"]
#[stable(feature = "try_from", since = "1.34.0")]
-#[const_trait]
pub trait TryFrom<T>: Sized {
/// The type returned in the event of a conversion error.
#[stable(feature = "try_from", since = "1.34.0")]
@@ -658,10 +652,9 @@ pub trait TryFrom<T>: Sized {
// As lifts over &
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T: ?Sized, U: ?Sized> const AsRef<U> for &T
+impl<T: ?Sized, U: ?Sized> AsRef<U> for &T
where
- T: ~const AsRef<U>,
+ T: AsRef<U>,
{
#[inline]
fn as_ref(&self) -> &U {
@@ -671,10 +664,9 @@ where
// As lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T: ?Sized, U: ?Sized> const AsRef<U> for &mut T
+impl<T: ?Sized, U: ?Sized> AsRef<U> for &mut T
where
- T: ~const AsRef<U>,
+ T: AsRef<U>,
{
#[inline]
fn as_ref(&self) -> &U {
@@ -692,10 +684,9 @@ where
// AsMut lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T: ?Sized, U: ?Sized> const AsMut<U> for &mut T
+impl<T: ?Sized, U: ?Sized> AsMut<U> for &mut T
where
- T: ~const AsMut<U>,
+ T: AsMut<U>,
{
#[inline]
fn as_mut(&mut self) -> &mut U {
@@ -713,10 +704,9 @@ where
// From implies Into
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T, U> const Into<U> for T
+impl<T, U> Into<U> for T
where
- U: ~const From<T>,
+ U: From<T>,
{
/// Calls `U::from(self)`.
///
@@ -730,8 +720,7 @@ where
// From (and thus Into) is reflexive
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for T {
+impl<T> From<T> for T {
/// Returns the argument unchanged.
#[inline(always)]
fn from(t: T) -> T {
@@ -748,8 +737,7 @@ impl<T> const From<T> for T {
#[allow(unused_attributes)] // FIXME(#58633): do a principled fix instead.
#[rustc_reservation_impl = "permitting this impl would forbid us from adding \
`impl<T> From<!> for T` later; see rust-lang/rust#64715 for details"]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<!> for T {
+impl<T> From<!> for T {
fn from(t: !) -> T {
t
}
@@ -757,10 +745,9 @@ impl<T> const From<!> for T {
// TryFrom implies TryInto
#[stable(feature = "try_from", since = "1.34.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T, U> const TryInto<U> for T
+impl<T, U> TryInto<U> for T
where
- U: ~const TryFrom<T>,
+ U: TryFrom<T>,
{
type Error = U::Error;
@@ -773,10 +760,9 @@ where
// Infallible conversions are semantically equivalent to fallible conversions
// with an uninhabited error type.
#[stable(feature = "try_from", since = "1.34.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T, U> const TryFrom<U> for T
+impl<T, U> TryFrom<U> for T
where
- U: ~const Into<T>,
+ U: Into<T>,
{
type Error = Infallible;
@@ -876,8 +862,7 @@ impl AsMut<str> for str {
pub enum Infallible {}
#[stable(feature = "convert_infallible", since = "1.34.0")]
-#[rustc_const_unstable(feature = "const_clone", issue = "91805")]
-impl const Clone for Infallible {
+impl Clone for Infallible {
fn clone(&self) -> Infallible {
match *self {}
}
@@ -929,8 +914,8 @@ impl Ord for Infallible {
}
#[stable(feature = "convert_infallible", since = "1.34.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<!> for Infallible {
+impl From<!> for Infallible {
+ #[inline]
fn from(x: !) -> Self {
x
}
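The two blanket impls above are what make a single `From` impl usable through `.into()`, and `From<T> for T` keeps the conversion reflexive. An illustrative sketch (the `Celsius`/`Fahrenheit` types are made up for this example):

    struct Celsius(f64);
    struct Fahrenheit(f64);

    impl From<Celsius> for Fahrenheit {
        fn from(c: Celsius) -> Self {
            Fahrenheit(c.0 * 9.0 / 5.0 + 32.0)
        }
    }

    fn main() {
        // Provided by the blanket `impl<T, U: From<T>> Into<U> for T`.
        let f: Fahrenheit = Celsius(100.0).into();
        assert_eq!(f.0, 212.0);
        // The reflexive impl: `From<T> for T` is the identity.
        assert_eq!(i32::from(5), 5);
    }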
diff --git a/library/core/src/convert/num.rs b/library/core/src/convert/num.rs
index a74a56bc5..56ab63be2 100644
--- a/library/core/src/convert/num.rs
+++ b/library/core/src/convert/num.rs
@@ -44,8 +44,7 @@ impl_float_to_int!(f64 => u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize);
macro_rules! impl_from {
($Small: ty, $Large: ty, #[$attr:meta], $doc: expr) => {
#[$attr]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const From<$Small> for $Large {
+ impl From<$Small> for $Large {
// Rustdocs on the impl block show a "[+] show undocumented items" toggle.
// Rustdocs on functions do not.
#[doc = $doc]
@@ -170,8 +169,7 @@ impl_from! { f32, f64, #[stable(feature = "lossless_float_conv", since = "1.6.0"
// bool -> Float
#[stable(feature = "float_from_bool", since = "1.68.0")]
-#[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
-impl const From<bool> for f32 {
+impl From<bool> for f32 {
/// Converts `bool` to `f32` losslessly. The resulting value is positive
/// `0.0` for `false` and `1.0` for `true` values.
///
@@ -190,8 +188,7 @@ impl const From<bool> for f32 {
}
}
#[stable(feature = "float_from_bool", since = "1.68.0")]
-#[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
-impl const From<bool> for f64 {
+impl From<bool> for f64 {
/// Converts `bool` to `f64` losslessly. The resulting value is positive
/// `0.0` for `false` and `1.0` for `true` values.
///
@@ -214,8 +211,7 @@ impl const From<bool> for f64 {
macro_rules! try_from_unbounded {
($source:ty, $($target:ty),*) => {$(
#[stable(feature = "try_from", since = "1.34.0")]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const TryFrom<$source> for $target {
+ impl TryFrom<$source> for $target {
type Error = TryFromIntError;
/// Try to create the target number type from a source
@@ -233,8 +229,7 @@ macro_rules! try_from_unbounded {
macro_rules! try_from_lower_bounded {
($source:ty, $($target:ty),*) => {$(
#[stable(feature = "try_from", since = "1.34.0")]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const TryFrom<$source> for $target {
+ impl TryFrom<$source> for $target {
type Error = TryFromIntError;
/// Try to create the target number type from a source
@@ -256,8 +251,7 @@ macro_rules! try_from_lower_bounded {
macro_rules! try_from_upper_bounded {
($source:ty, $($target:ty),*) => {$(
#[stable(feature = "try_from", since = "1.34.0")]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const TryFrom<$source> for $target {
+ impl TryFrom<$source> for $target {
type Error = TryFromIntError;
/// Try to create the target number type from a source
@@ -279,8 +273,7 @@ macro_rules! try_from_upper_bounded {
macro_rules! try_from_both_bounded {
($source:ty, $($target:ty),*) => {$(
#[stable(feature = "try_from", since = "1.34.0")]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const TryFrom<$source> for $target {
+ impl TryFrom<$source> for $target {
type Error = TryFromIntError;
/// Try to create the target number type from a source
@@ -431,8 +424,7 @@ use crate::num::NonZeroUsize;
macro_rules! nzint_impl_from {
($Small: ty, $Large: ty, #[$attr:meta], $doc: expr) => {
#[$attr]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const From<$Small> for $Large {
+ impl From<$Small> for $Large {
// Rustdocs on the impl block show a "[+] show undocumented items" toggle.
// Rustdocs on functions do not.
#[doc = $doc]
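The four `try_from_*` macros differ only in which bound checks they emit; from the caller's side the result is the familiar fallible conversions (an illustration, not part of the patch):

    use std::num::TryFromIntError;

    fn main() -> Result<(), TryFromIntError> {
        // Upper bound violated: 300 does not fit in u8.
        assert!(u8::try_from(300_i32).is_err());
        // Lower bound violated: u32 cannot hold a negative value.
        assert!(u32::try_from(-1_i32).is_err());
        // In-range conversions succeed.
        assert_eq!(u8::try_from(255_i32)?, 255_u8);
        Ok(())
    }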
diff --git a/library/core/src/default.rs b/library/core/src/default.rs
index d96b53de0..09dbc9581 100644
--- a/library/core/src/default.rs
+++ b/library/core/src/default.rs
@@ -99,7 +99,6 @@
/// ```
#[cfg_attr(not(test), rustc_diagnostic_item = "Default")]
#[stable(feature = "rust1", since = "1.0.0")]
-#[const_trait]
pub trait Default: Sized {
/// Returns the "default value" for a type.
///
@@ -190,8 +189,7 @@ pub macro Default($item:item) {
macro_rules! default_impl {
($t:ty, $v:expr, $doc:tt) => {
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
- impl const Default for $t {
+ impl Default for $t {
#[inline]
#[doc = $doc]
fn default() -> $t {
diff --git a/library/core/src/escape.rs b/library/core/src/escape.rs
new file mode 100644
index 000000000..3d471419b
--- /dev/null
+++ b/library/core/src/escape.rs
@@ -0,0 +1,112 @@
+//! Helper code for character escaping.
+
+use crate::ascii;
+use crate::num::NonZeroUsize;
+use crate::ops::Range;
+
+const HEX_DIGITS: [ascii::Char; 16] = *b"0123456789abcdef".as_ascii().unwrap();
+
+/// Escapes a byte into the provided buffer; returns the range within the
+/// buffer occupied by the escaped representation.
+pub(crate) fn escape_ascii_into(output: &mut [ascii::Char; 4], byte: u8) -> Range<u8> {
+ #[inline]
+ fn backslash(a: ascii::Char) -> ([ascii::Char; 4], u8) {
+ ([ascii::Char::ReverseSolidus, a, ascii::Char::Null, ascii::Char::Null], 2)
+ }
+
+ let (data, len) = match byte {
+ b'\t' => backslash(ascii::Char::SmallT),
+ b'\r' => backslash(ascii::Char::SmallR),
+ b'\n' => backslash(ascii::Char::SmallN),
+ b'\\' => backslash(ascii::Char::ReverseSolidus),
+ b'\'' => backslash(ascii::Char::Apostrophe),
+ b'\"' => backslash(ascii::Char::QuotationMark),
+ _ => if let Some(a) = byte.as_ascii() && !byte.is_ascii_control() {
+ ([a, ascii::Char::Null, ascii::Char::Null, ascii::Char::Null], 1)
+ } else {
+ let hi = HEX_DIGITS[usize::from(byte >> 4)];
+ let lo = HEX_DIGITS[usize::from(byte & 0xf)];
+ ([ascii::Char::ReverseSolidus, ascii::Char::SmallX, hi, lo], 4)
+ }
+ };
+ *output = data;
+ 0..len
+}
+
+/// Escapes a character into the provided buffer using the `\u{NNNN}` representation.
+pub(crate) fn escape_unicode_into(output: &mut [ascii::Char; 10], ch: char) -> Range<u8> {
+ output[9] = ascii::Char::RightCurlyBracket;
+
+ let ch = ch as u32;
+ output[3] = HEX_DIGITS[((ch >> 20) & 15) as usize];
+ output[4] = HEX_DIGITS[((ch >> 16) & 15) as usize];
+ output[5] = HEX_DIGITS[((ch >> 12) & 15) as usize];
+ output[6] = HEX_DIGITS[((ch >> 8) & 15) as usize];
+ output[7] = HEX_DIGITS[((ch >> 4) & 15) as usize];
+ output[8] = HEX_DIGITS[((ch >> 0) & 15) as usize];
+
+ // or-ing 1 ensures that for ch==0 the code computes that one digit should
+ // be printed.
+ let start = (ch | 1).leading_zeros() as usize / 4 - 2;
+ const UNICODE_ESCAPE_PREFIX: &[ascii::Char; 3] = b"\\u{".as_ascii().unwrap();
+ output[start..][..3].copy_from_slice(UNICODE_ESCAPE_PREFIX);
+
+ (start as u8)..10
+}
+
+/// An iterator over a fixed-size array.
+///
+/// This is essentially equivalent to the array’s `IntoIter` except that the
+/// indices are limited to `u8` to reduce the size of the structure.
+#[derive(Clone, Debug)]
+pub(crate) struct EscapeIterInner<const N: usize> {
+ // The element type ensures this is always ASCII, and thus also valid UTF-8.
+ pub(crate) data: [ascii::Char; N],
+
+ // Invariant: alive.start <= alive.end <= N.
+ pub(crate) alive: Range<u8>,
+}
+
+impl<const N: usize> EscapeIterInner<N> {
+ pub fn new(data: [ascii::Char; N], alive: Range<u8>) -> Self {
+ const { assert!(N < 256) };
+ debug_assert!(alive.start <= alive.end && usize::from(alive.end) <= N, "{alive:?}");
+ Self { data, alive }
+ }
+
+ pub fn from_array<const M: usize>(array: [ascii::Char; M]) -> Self {
+ const { assert!(M <= N) };
+
+ let mut data = [ascii::Char::Null; N];
+ data[..M].copy_from_slice(&array);
+ Self::new(data, 0..M as u8)
+ }
+
+ pub fn as_ascii(&self) -> &[ascii::Char] {
+ &self.data[usize::from(self.alive.start)..usize::from(self.alive.end)]
+ }
+
+ pub fn as_str(&self) -> &str {
+ self.as_ascii().as_str()
+ }
+
+ pub fn len(&self) -> usize {
+ usize::from(self.alive.end - self.alive.start)
+ }
+
+ pub fn next(&mut self) -> Option<u8> {
+ self.alive.next().map(|i| self.data[usize::from(i)].as_u8())
+ }
+
+ pub fn next_back(&mut self) -> Option<u8> {
+ self.alive.next_back().map(|i| self.data[usize::from(i)].as_u8())
+ }
+
+ pub fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
+ self.alive.advance_by(n)
+ }
+
+ pub fn advance_back_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
+ self.alive.advance_back_by(n)
+ }
+}
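The only non-obvious arithmetic in the new module is the `start` computation in `escape_unicode_into`: with the closing brace fixed at index 9 and the hex digits written backwards from index 8, a value needing `d` hex digits must place the three-byte `\u{` prefix at index `6 - d`, which is exactly `(ch | 1).leading_zeros() / 4 - 2`. A standalone re-derivation (the helper below is local to this sketch, not part of the patch):

    fn unicode_escape_len(ch: char) -> usize {
        let v = ch as u32;
        // Significant hex digits, at least one thanks to the `| 1`.
        let digits = 8 - (v | 1).leading_zeros() as usize / 4;
        // '\' 'u' '{' + digits + '}'
        3 + digits + 1
    }

    fn main() {
        for ch in ['\0', 'a', 'ß', '∂', '🦀', char::MAX] {
            assert_eq!(unicode_escape_len(ch), ch.escape_unicode().count());
        }
    }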
diff --git a/library/core/src/ffi/c_str.rs b/library/core/src/ffi/c_str.rs
index 4a5306cca..e1e1a9b40 100644
--- a/library/core/src/ffi/c_str.rs
+++ b/library/core/src/ffi/c_str.rs
@@ -79,9 +79,9 @@ use crate::str;
///
/// [str]: prim@str "str"
#[derive(Hash)]
-#[cfg_attr(not(test), rustc_diagnostic_item = "CStr")]
#[stable(feature = "core_c_str", since = "1.64.0")]
#[rustc_has_incoherent_inherent_impls]
+#[cfg_attr(not(bootstrap), lang = "CStr")]
// FIXME:
// `fn from` in `impl From<&CStr> for Box<CStr>` current implementation relies
// on `CStr` being layout-compatible with `[u8]`.
@@ -324,14 +324,15 @@ impl CStr {
/// assert_eq!(c_str.to_str().unwrap(), "AAAAAAAA");
/// ```
///
- #[rustc_allow_const_fn_unstable(const_slice_index)]
#[stable(feature = "cstr_from_bytes_until_nul", since = "1.69.0")]
#[rustc_const_stable(feature = "cstr_from_bytes_until_nul", since = "1.69.0")]
pub const fn from_bytes_until_nul(bytes: &[u8]) -> Result<&CStr, FromBytesUntilNulError> {
let nul_pos = memchr::memchr(0, bytes);
match nul_pos {
Some(nul_pos) => {
- let subslice = &bytes[..nul_pos + 1];
+ // FIXME(const-hack) replace with range index
+ // SAFETY: nul_pos + 1 <= bytes.len()
+ let subslice = unsafe { crate::slice::from_raw_parts(bytes.as_ptr(), nul_pos + 1) };
// SAFETY: We know there is a nul byte at nul_pos, so this slice
// (ending at the nul byte) is a well-formed C string.
Ok(unsafe { CStr::from_bytes_with_nul_unchecked(subslice) })
@@ -516,8 +517,6 @@ impl CStr {
/// # Examples
///
/// ```
- /// #![feature(cstr_is_empty)]
- ///
/// use std::ffi::CStr;
/// # use std::ffi::FromBytesWithNulError;
///
@@ -532,11 +531,13 @@ impl CStr {
/// # }
/// ```
#[inline]
- #[unstable(feature = "cstr_is_empty", issue = "102444")]
+ #[stable(feature = "cstr_is_empty", since = "1.71.0")]
+ #[rustc_const_stable(feature = "cstr_is_empty", since = "1.71.0")]
pub const fn is_empty(&self) -> bool {
// SAFETY: We know there is at least one byte; for empty strings it
// is the NUL terminator.
- (unsafe { self.inner.get_unchecked(0) }) == &0
+ // FIXME(const-hack): use get_unchecked
+ unsafe { *self.inner.as_ptr() == 0 }
}
/// Converts this C string to a byte slice.
@@ -560,8 +561,7 @@ impl CStr {
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cstr_methods", issue = "101719")]
- pub const fn to_bytes(&self) -> &[u8] {
+ pub fn to_bytes(&self) -> &[u8] {
let bytes = self.to_bytes_with_nul();
// SAFETY: to_bytes_with_nul returns slice with length at least 1
unsafe { bytes.get_unchecked(..bytes.len() - 1) }
@@ -612,8 +612,7 @@ impl CStr {
/// assert_eq!(cstr.to_str(), Ok("foo"));
/// ```
#[stable(feature = "cstr_to_str", since = "1.4.0")]
- #[rustc_const_unstable(feature = "const_cstr_methods", issue = "101719")]
- pub const fn to_str(&self) -> Result<&str, str::Utf8Error> {
+ pub fn to_str(&self) -> Result<&str, str::Utf8Error> {
// N.B., when `CStr` is changed to perform the length check in `.to_bytes()`
// instead of in `from_ptr()`, it may be worth considering if this should
// be rewritten to do the UTF-8 check inline with the length calculation
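Behaviour covered by the `c_str.rs` changes, for reference (standard, stable API usage; not part of the patch):

    use std::ffi::CStr;

    fn main() {
        // `from_bytes_until_nul` stops at the first nul and ignores the rest.
        let cstr = CStr::from_bytes_until_nul(b"foo\0bar").unwrap();
        assert_eq!(cstr.to_str(), Ok("foo"));
        assert_eq!(cstr.to_bytes(), b"foo");

        // `is_empty`, stabilised here, is true only for the empty C string.
        let empty = CStr::from_bytes_with_nul(b"\0").unwrap();
        assert!(empty.is_empty());
        assert!(!cstr.is_empty());
    }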
diff --git a/library/core/src/ffi/mod.rs b/library/core/src/ffi/mod.rs
index c4f554c8c..b73abbbac 100644
--- a/library/core/src/ffi/mod.rs
+++ b/library/core/src/ffi/mod.rs
@@ -143,7 +143,10 @@ mod c_char_definition {
target_arch = "powerpc"
)
),
- all(target_os = "fuchsia", target_arch = "aarch64"),
+ all(
+ target_os = "fuchsia",
+ any(target_arch = "aarch64", target_arch = "riscv64")
+ ),
all(target_os = "nto", target_arch = "aarch64"),
target_os = "horizon"
))] {
@@ -199,7 +202,8 @@ mod c_long_definition {
// would be uninhabited and at least dereferencing such pointers would
// be UB.
#[doc = include_str!("c_void.md")]
-#[repr(u8)]
+#[cfg_attr(not(bootstrap), lang = "c_void")]
+#[cfg_attr(not(doc), repr(u8))] // work around https://github.com/rust-lang/rust/issues/90435
#[stable(feature = "core_c_void", since = "1.30.0")]
pub enum c_void {
#[unstable(
@@ -240,7 +244,7 @@ impl fmt::Debug for c_void {
target_os = "uefi",
windows,
))]
-#[repr(transparent)]
+#[cfg_attr(not(doc), repr(transparent))] // work around https://github.com/rust-lang/rust/issues/90435
#[unstable(
feature = "c_variadic",
reason = "the `c_variadic` feature has not been properly tested on \
@@ -292,7 +296,7 @@ impl<'f> fmt::Debug for VaListImpl<'f> {
not(target_os = "uefi"),
not(windows),
))]
-#[repr(C)]
+#[cfg_attr(not(doc), repr(C))] // work around https://github.com/rust-lang/rust/issues/66401
#[derive(Debug)]
#[unstable(
feature = "c_variadic",
@@ -312,7 +316,7 @@ pub struct VaListImpl<'f> {
/// PowerPC ABI implementation of a `va_list`.
#[cfg(all(target_arch = "powerpc", not(target_os = "uefi"), not(windows)))]
-#[repr(C)]
+#[cfg_attr(not(doc), repr(C))] // work around https://github.com/rust-lang/rust/issues/66401
#[derive(Debug)]
#[unstable(
feature = "c_variadic",
@@ -332,7 +336,7 @@ pub struct VaListImpl<'f> {
/// s390x ABI implementation of a `va_list`.
#[cfg(target_arch = "s390x")]
-#[repr(C)]
+#[cfg_attr(not(doc), repr(C))] // work around https://github.com/rust-lang/rust/issues/66401
#[derive(Debug)]
#[unstable(
feature = "c_variadic",
@@ -351,7 +355,7 @@ pub struct VaListImpl<'f> {
/// x86_64 ABI implementation of a `va_list`.
#[cfg(all(target_arch = "x86_64", not(target_os = "uefi"), not(windows)))]
-#[repr(C)]
+#[cfg_attr(not(doc), repr(C))] // work around https://github.com/rust-lang/rust/issues/66401
#[derive(Debug)]
#[unstable(
feature = "c_variadic",
@@ -369,7 +373,7 @@ pub struct VaListImpl<'f> {
}
/// A wrapper for a `va_list`
-#[repr(transparent)]
+#[cfg_attr(not(doc), repr(transparent))] // work around https://github.com/rust-lang/rust/issues/90435
#[derive(Debug)]
#[unstable(
feature = "c_variadic",
diff --git a/library/core/src/fmt/builders.rs b/library/core/src/fmt/builders.rs
index 7da49b04a..36f49d51c 100644
--- a/library/core/src/fmt/builders.rs
+++ b/library/core/src/fmt/builders.rs
@@ -60,7 +60,7 @@ impl fmt::Write for PadAdapter<'_, '_> {
/// }
///
/// impl fmt::Debug for Foo {
-/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+/// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_struct("Foo")
/// .field("bar", &self.bar)
/// .field("baz", &self.baz)
@@ -109,14 +109,14 @@ impl<'a, 'b: 'a> DebugStruct<'a, 'b> {
/// .field("bar", &self.bar) // We add `bar` field.
/// .field("another", &self.another) // We add `another` field.
/// // We even add a field which doesn't exist (because why not?).
- /// .field("not_existing_field", &1)
+ /// .field("nonexistent_field", &1)
/// .finish() // We're good to go!
/// }
/// }
///
/// assert_eq!(
/// format!("{:?}", Bar { bar: 10, another: "Hello World".to_string() }),
- /// "Bar { bar: 10, another: \"Hello World\", not_existing_field: 1 }",
+ /// "Bar { bar: 10, another: \"Hello World\", nonexistent_field: 1 }",
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
@@ -249,7 +249,7 @@ impl<'a, 'b: 'a> DebugStruct<'a, 'b> {
/// struct Foo(i32, String);
///
/// impl fmt::Debug for Foo {
-/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+/// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_tuple("Foo")
/// .field(&self.0)
/// .field(&self.1)
@@ -418,7 +418,7 @@ impl<'a, 'b: 'a> DebugInner<'a, 'b> {
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
-/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+/// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_set().entries(self.0.iter()).finish()
/// }
/// }
@@ -548,7 +548,7 @@ impl<'a, 'b: 'a> DebugSet<'a, 'b> {
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
-/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+/// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_list().entries(self.0.iter()).finish()
/// }
/// }
@@ -678,7 +678,7 @@ impl<'a, 'b: 'a> DebugList<'a, 'b> {
/// struct Foo(Vec<(String, i32)>);
///
/// impl fmt::Debug for Foo {
-/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+/// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (k, v))).finish()
/// }
/// }
diff --git a/library/core/src/fmt/float.rs b/library/core/src/fmt/float.rs
index 89d5fac30..3bbf5d877 100644
--- a/library/core/src/fmt/float.rs
+++ b/library/core/src/fmt/float.rs
@@ -45,7 +45,8 @@ where
&mut buf,
&mut parts,
);
- fmt.pad_formatted_parts(&formatted)
+ // SAFETY: `to_exact_fixed_str` and `format_exact` produce only ASCII characters.
+ unsafe { fmt.pad_formatted_parts(&formatted) }
}
// Don't inline this so callers that call both this and the above won't wind
@@ -71,7 +72,8 @@ where
&mut buf,
&mut parts,
);
- fmt.pad_formatted_parts(&formatted)
+ // SAFETY: `to_shortest_str` and `format_shortest` produce only ASCII characters.
+ unsafe { fmt.pad_formatted_parts(&formatted) }
}
fn float_to_decimal_display<T>(fmt: &mut Formatter<'_>, num: &T) -> Result
@@ -116,7 +118,8 @@ where
&mut buf,
&mut parts,
);
- fmt.pad_formatted_parts(&formatted)
+ // SAFETY: `to_exact_exp_str` and `format_exact` produce only ASCII characters.
+ unsafe { fmt.pad_formatted_parts(&formatted) }
}
// Don't inline this so callers that call both this and the above won't wind
@@ -143,7 +146,8 @@ where
&mut buf,
&mut parts,
);
- fmt.pad_formatted_parts(&formatted)
+ // SAFETY: `to_shortest_exp_str` and `format_shortest` produce only ASCII characters.
+ unsafe { fmt.pad_formatted_parts(&formatted) }
}
// Common code of floating point LowerExp and UpperExp.
diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs
index fcda097f0..1786b309c 100644
--- a/library/core/src/fmt/mod.rs
+++ b/library/core/src/fmt/mod.rs
@@ -18,6 +18,7 @@ mod float;
#[cfg(no_fp_fmt_parse)]
mod nofloat;
mod num;
+mod rt;
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Alignment")]
@@ -38,12 +39,6 @@ pub enum Alignment {
#[stable(feature = "debug_builders", since = "1.2.0")]
pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
-#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
-#[doc(hidden)]
-pub mod rt {
- pub mod v1;
-}
-
/// The type returned by formatter methods.
///
/// # Examples
@@ -227,7 +222,7 @@ impl<W: Write + ?Sized> Write for &mut W {
pub struct Formatter<'a> {
flags: u32,
fill: char,
- align: rt::v1::Alignment,
+ align: rt::Alignment,
width: Option<usize>,
precision: Option<usize>,
@@ -248,7 +243,7 @@ impl<'a> Formatter<'a> {
Formatter {
flags: 0,
fill: ' ',
- align: rt::v1::Alignment::Unknown,
+ align: rt::Alignment::Unknown,
width: None,
precision: None,
buf,
@@ -256,145 +251,48 @@ impl<'a> Formatter<'a> {
}
}
-// NB. Argument is essentially an optimized partially applied formatting function,
-// equivalent to `exists T.(&T, fn(&T, &mut Formatter<'_>) -> Result`.
-
-extern "C" {
- type Opaque;
-}
-
-/// This struct represents the generic "argument" which is taken by the Xprintf
-/// family of functions. It contains a function to format the given value. At
-/// compile time it is ensured that the function and the value have the correct
-/// types, and then this struct is used to canonicalize arguments to one type.
-#[lang = "format_argument"]
+/// This structure represents a safely precompiled version of a format string
+/// and its arguments. It cannot be created safely at runtime, so no
+/// constructors are given and the fields are private to prevent
+/// modification.
+///
+/// The [`format_args!`] macro will safely create an instance of this structure.
+/// The macro validates the format string at compile-time so usage of the
+/// [`write()`] and [`format()`] functions can be safely performed.
+///
+/// You can use the `Arguments<'a>` that [`format_args!`] returns in `Debug`
+/// and `Display` contexts as seen below. The example also shows that `Debug`
+/// and `Display` format to the same thing: the interpolated format string
+/// in `format_args!`.
+///
+/// ```rust
+/// let debug = format!("{:?}", format_args!("{} foo {:?}", 1, 2));
+/// let display = format!("{}", format_args!("{} foo {:?}", 1, 2));
+/// assert_eq!("1 foo 2", display);
+/// assert_eq!(display, debug);
+/// ```
+///
+/// [`format()`]: ../../std/fmt/fn.format.html
+#[lang = "format_arguments"]
+#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone)]
-#[allow(missing_debug_implementations)]
-#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
-#[doc(hidden)]
-pub struct ArgumentV1<'a> {
- value: &'a Opaque,
- formatter: fn(&Opaque, &mut Formatter<'_>) -> Result,
-}
-
-/// This struct represents the unsafety of constructing an `Arguments`.
-/// It exists, rather than an unsafe function, in order to simplify the expansion
-/// of `format_args!(..)` and reduce the scope of the `unsafe` block.
-#[lang = "format_unsafe_arg"]
-#[allow(missing_debug_implementations)]
-#[doc(hidden)]
-#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
-pub struct UnsafeArg {
- _private: (),
-}
-
-impl UnsafeArg {
- /// See documentation where `UnsafeArg` is required to know when it is safe to
- /// create and use `UnsafeArg`.
- #[doc(hidden)]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- #[inline(always)]
- pub unsafe fn new() -> Self {
- Self { _private: () }
- }
-}
-
-// This guarantees a single stable value for the function pointer associated with
-// indices/counts in the formatting infrastructure.
-//
-// Note that a function defined as such would not be correct as functions are
-// always tagged unnamed_addr with the current lowering to LLVM IR, so their
-// address is not considered important to LLVM and as such the as_usize cast
-// could have been miscompiled. In practice, we never call as_usize on non-usize
-// containing data (as a matter of static generation of the formatting
-// arguments), so this is merely an additional check.
-//
-// We primarily want to ensure that the function pointer at `USIZE_MARKER` has
-// an address corresponding *only* to functions that also take `&usize` as their
-// first argument. The read_volatile here ensures that we can safely ready out a
-// usize from the passed reference and that this address does not point at a
-// non-usize taking function.
-#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
-static USIZE_MARKER: fn(&usize, &mut Formatter<'_>) -> Result = |ptr, _| {
- // SAFETY: ptr is a reference
- let _v: usize = unsafe { crate::ptr::read_volatile(ptr) };
- loop {}
-};
-
-macro_rules! arg_new {
- ($f: ident, $t: ident) => {
- #[doc(hidden)]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- #[inline]
- pub fn $f<'b, T: $t>(x: &'b T) -> ArgumentV1<'_> {
- Self::new(x, $t::fmt)
- }
- };
-}
-
-#[rustc_diagnostic_item = "ArgumentV1Methods"]
-impl<'a> ArgumentV1<'a> {
- #[doc(hidden)]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- #[inline]
- pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
- // SAFETY: `mem::transmute(x)` is safe because
- // 1. `&'b T` keeps the lifetime it originated with `'b`
- // (so as to not have an unbounded lifetime)
- // 2. `&'b T` and `&'b Opaque` have the same memory layout
- // (when `T` is `Sized`, as it is here)
- // `mem::transmute(f)` is safe since `fn(&T, &mut Formatter<'_>) -> Result`
- // and `fn(&Opaque, &mut Formatter<'_>) -> Result` have the same ABI
- // (as long as `T` is `Sized`)
- unsafe { ArgumentV1 { formatter: mem::transmute(f), value: mem::transmute(x) } }
- }
-
- arg_new!(new_display, Display);
- arg_new!(new_debug, Debug);
- arg_new!(new_octal, Octal);
- arg_new!(new_lower_hex, LowerHex);
- arg_new!(new_upper_hex, UpperHex);
- arg_new!(new_pointer, Pointer);
- arg_new!(new_binary, Binary);
- arg_new!(new_lower_exp, LowerExp);
- arg_new!(new_upper_exp, UpperExp);
+pub struct Arguments<'a> {
+ // Format string pieces to print.
+ pieces: &'a [&'static str],
- #[doc(hidden)]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- pub fn from_usize(x: &usize) -> ArgumentV1<'_> {
- ArgumentV1::new(x, USIZE_MARKER)
- }
-
- fn as_usize(&self) -> Option<usize> {
- // We are type punning a bit here: USIZE_MARKER only takes an &usize but
- // formatter takes an &Opaque. Rust understandably doesn't think we should compare
- // the function pointers if they don't have the same signature, so we cast to
- // usizes to tell it that we just want to compare addresses.
- if self.formatter as usize == USIZE_MARKER as usize {
- // SAFETY: The `formatter` field is only set to USIZE_MARKER if
- // the value is a usize, so this is safe
- Some(unsafe { *(self.value as *const _ as *const usize) })
- } else {
- None
- }
- }
-}
+ // Placeholder specs, or `None` if all specs are default (as in "{}{}").
+ fmt: Option<&'a [rt::Placeholder]>,
-// flags available in the v1 format of format_args
-#[derive(Copy, Clone)]
-enum FlagV1 {
- SignPlus,
- SignMinus,
- Alternate,
- SignAwareZeroPad,
- DebugLowerHex,
- DebugUpperHex,
+ // Dynamic arguments for interpolation, to be interleaved with string
+ // pieces. (Every argument is preceded by a string piece.)
+ args: &'a [rt::Argument<'a>],
}
+/// Used by the format_args!() macro to create a fmt::Arguments object.
+#[doc(hidden)]
+#[unstable(feature = "fmt_internals", issue = "none")]
impl<'a> Arguments<'a> {
- #[doc(hidden)]
#[inline]
- #[unstable(feature = "fmt_internals", issue = "none")]
#[rustc_const_unstable(feature = "const_fmt_arguments_new", issue = "none")]
pub const fn new_const(pieces: &'a [&'static str]) -> Self {
if pieces.len() > 1 {
@@ -405,23 +303,8 @@ impl<'a> Arguments<'a> {
/// When using the format_args!() macro, this function is used to generate the
/// Arguments structure.
- #[cfg(not(bootstrap))]
- #[doc(hidden)]
#[inline]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- pub fn new_v1(pieces: &'a [&'static str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
- if pieces.len() < args.len() || pieces.len() > args.len() + 1 {
- panic!("invalid args");
- }
- Arguments { pieces, fmt: None, args }
- }
-
- #[cfg(bootstrap)]
- #[doc(hidden)]
- #[inline]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- #[rustc_const_unstable(feature = "const_fmt_arguments_new", issue = "none")]
- pub const fn new_v1(pieces: &'a [&'static str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
+ pub fn new_v1(pieces: &'a [&'static str], args: &'a [rt::Argument<'a>]) -> Arguments<'a> {
if pieces.len() < args.len() || pieces.len() > args.len() + 1 {
panic!("invalid args");
}
@@ -430,21 +313,17 @@ impl<'a> Arguments<'a> {
/// This function is used to specify nonstandard formatting parameters.
///
- /// An `UnsafeArg` is required because the following invariants must be held
+ /// An `rt::UnsafeArg` is required because the following invariants must be held
/// in order for this function to be safe:
/// 1. The `pieces` slice must be at least as long as `fmt`.
- /// 2. Every [`rt::v1::Argument::position`] value within `fmt` must be a
- /// valid index of `args`.
- /// 3. Every [`rt::v1::Count::Param`] within `fmt` must contain a valid index of
- /// `args`.
- #[doc(hidden)]
+ /// 2. Every `rt::Placeholder::position` value within `fmt` must be a valid index of `args`.
+ /// 3. Every `rt::Count::Param` within `fmt` must contain a valid index of `args`.
#[inline]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
pub fn new_v1_formatted(
pieces: &'a [&'static str],
- args: &'a [ArgumentV1<'a>],
- fmt: &'a [rt::v1::Argument],
- _unsafe_arg: UnsafeArg,
+ args: &'a [rt::Argument<'a>],
+ fmt: &'a [rt::Placeholder],
+ _unsafe_arg: rt::UnsafeArg,
) -> Arguments<'a> {
Arguments { pieces, fmt: Some(fmt), args }
}
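
A user-level illustration of the invariants listed above (a hedged sketch, not part of this patch): runtime `width$`/`prec$` arguments are exactly what `rt::Count::Param` indexes into `args`, so the positions the macro emits must stay in bounds.

    fn main() {
        let value = 3.14159_f64;
        let width = 10;
        let prec = 2;
        // Each `width$`/`prec$` below becomes a `Count::Param` index into the
        // argument list handed to `Arguments::new_v1_formatted` by the macro.
        assert_eq!(format!("{value:>width$.prec$}"), "      3.14");
    }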
@@ -453,9 +332,7 @@ impl<'a> Arguments<'a> {
///
/// This is intended to be used for setting initial `String` capacity
/// when using `format!`. Note: this is neither the lower nor upper bound.
- #[doc(hidden)]
#[inline]
- #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
pub fn estimated_capacity(&self) -> usize {
let pieces_length: usize = self.pieces.iter().map(|x| x.len()).sum();
@@ -475,43 +352,6 @@ impl<'a> Arguments<'a> {
}
}
-/// This structure represents a safely precompiled version of a format string
-/// and its arguments. This cannot be generated at runtime because it cannot
-/// safely be done, so no constructors are given and the fields are private
-/// to prevent modification.
-///
-/// The [`format_args!`] macro will safely create an instance of this structure.
-/// The macro validates the format string at compile-time so usage of the
-/// [`write()`] and [`format()`] functions can be safely performed.
-///
-/// You can use the `Arguments<'a>` that [`format_args!`] returns in `Debug`
-/// and `Display` contexts as seen below. The example also shows that `Debug`
-/// and `Display` format to the same thing: the interpolated format string
-/// in `format_args!`.
-///
-/// ```rust
-/// let debug = format!("{:?}", format_args!("{} foo {:?}", 1, 2));
-/// let display = format!("{}", format_args!("{} foo {:?}", 1, 2));
-/// assert_eq!("1 foo 2", display);
-/// assert_eq!(display, debug);
-/// ```
-///
-/// [`format()`]: ../../std/fmt/fn.format.html
-#[lang = "format_arguments"]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Copy, Clone)]
-pub struct Arguments<'a> {
- // Format string pieces to print.
- pieces: &'a [&'static str],
-
- // Placeholder specs, or `None` if all specs are default (as in "{}{}").
- fmt: Option<&'a [rt::v1::Argument]>,
-
- // Dynamic arguments for interpolation, to be interleaved with string
- // pieces. (Every argument is preceded by a string piece.)
- args: &'a [ArgumentV1<'a>],
-}
-
impl<'a> Arguments<'a> {
/// Get the formatted string, if it has no arguments to be formatted at runtime.
///
@@ -541,7 +381,7 @@ impl<'a> Arguments<'a> {
///
/// fn write_str(_: &str) { /* ... */ }
///
- /// fn write_fmt(args: &Arguments) {
+ /// fn write_fmt(args: &Arguments<'_>) {
/// if let Some(s) = args.as_str() {
/// write_str(s)
/// } else {
@@ -1251,7 +1091,7 @@ pub fn write(output: &mut dyn Write, args: Arguments<'_>) -> Result {
if !piece.is_empty() {
formatter.buf.write_str(*piece)?;
}
- (arg.formatter)(arg.value, &mut formatter)?;
+ arg.fmt(&mut formatter)?;
idx += 1;
}
}
@@ -1281,15 +1121,15 @@ pub fn write(output: &mut dyn Write, args: Arguments<'_>) -> Result {
Ok(())
}
-unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::v1::Argument, args: &[ArgumentV1<'_>]) -> Result {
- fmt.fill = arg.format.fill;
- fmt.align = arg.format.align;
- fmt.flags = arg.format.flags;
+unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::Placeholder, args: &[rt::Argument<'_>]) -> Result {
+ fmt.fill = arg.fill;
+ fmt.align = arg.align;
+ fmt.flags = arg.flags;
// SAFETY: arg and args come from the same Arguments,
// which guarantees the indexes are always within bounds.
unsafe {
- fmt.width = getcount(args, &arg.format.width);
- fmt.precision = getcount(args, &arg.format.precision);
+ fmt.width = getcount(args, &arg.width);
+ fmt.precision = getcount(args, &arg.precision);
}
// Extract the correct argument
@@ -1299,14 +1139,14 @@ unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::v1::Argument, args: &[ArgumentV
let value = unsafe { args.get_unchecked(arg.position) };
// Then actually do some printing
- (value.formatter)(value.value, fmt)
+ value.fmt(fmt)
}
-unsafe fn getcount(args: &[ArgumentV1<'_>], cnt: &rt::v1::Count) -> Option<usize> {
+unsafe fn getcount(args: &[rt::Argument<'_>], cnt: &rt::Count) -> Option<usize> {
match *cnt {
- rt::v1::Count::Is(n) => Some(n),
- rt::v1::Count::Implied => None,
- rt::v1::Count::Param(i) => {
+ rt::Count::Is(n) => Some(n),
+ rt::Count::Implied => None,
+ rt::Count::Param(i) => {
debug_assert!(i < args.len());
// SAFETY: cnt and args come from the same Arguments,
// which guarantees this index is always within bounds.
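
For context (stable API only): the loop above is what ultimately runs when formatting through `core::fmt::write`, interleaving literal string pieces with the type-erased arguments.

    use std::fmt::Write as _;

    fn main() -> std::fmt::Result {
        let mut out = String::new();
        // `write!` calls `Write::write_fmt`, whose default impl is the
        // `fmt::write` loop shown above: pieces first, then each argument.
        write!(out, "{} + {} = {}", 1, 2, 1 + 2)?;
        assert_eq!(out, "1 + 2 = 3");
        Ok(())
    }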
@@ -1388,7 +1228,7 @@ impl<'a> Formatter<'a> {
/// }
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// // We need to remove "-" from the number output.
/// let tmp = self.nb.abs().to_string();
///
@@ -1449,9 +1289,9 @@ impl<'a> Formatter<'a> {
// is zero
Some(min) if self.sign_aware_zero_pad() => {
let old_fill = crate::mem::replace(&mut self.fill, '0');
- let old_align = crate::mem::replace(&mut self.align, rt::v1::Alignment::Right);
+ let old_align = crate::mem::replace(&mut self.align, rt::Alignment::Right);
write_prefix(self, sign, prefix)?;
- let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
+ let post_padding = self.padding(min - width, Alignment::Right)?;
self.buf.write_str(buf)?;
post_padding.write(self)?;
self.fill = old_fill;
@@ -1460,7 +1300,7 @@ impl<'a> Formatter<'a> {
}
// Otherwise, the sign and prefix goes after the padding
Some(min) => {
- let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
+ let post_padding = self.padding(min - width, Alignment::Right)?;
write_prefix(self, sign, prefix)?;
self.buf.write_str(buf)?;
post_padding.write(self)
@@ -1488,7 +1328,7 @@ impl<'a> Formatter<'a> {
/// struct Foo;
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// formatter.pad("Foo")
/// }
/// }
@@ -1535,7 +1375,7 @@ impl<'a> Formatter<'a> {
// If we're under both the maximum and the minimum width, then fill
// up the minimum width with the specified string + some alignment.
else {
- let align = rt::v1::Alignment::Left;
+ let align = Alignment::Left;
let post_padding = self.padding(width - chars_count, align)?;
self.buf.write_str(s)?;
post_padding.write(self)
@@ -1550,17 +1390,19 @@ impl<'a> Formatter<'a> {
pub(crate) fn padding(
&mut self,
padding: usize,
- default: rt::v1::Alignment,
+ default: Alignment,
) -> result::Result<PostPadding, Error> {
let align = match self.align {
- rt::v1::Alignment::Unknown => default,
- _ => self.align,
+ rt::Alignment::Unknown => default,
+ rt::Alignment::Left => Alignment::Left,
+ rt::Alignment::Right => Alignment::Right,
+ rt::Alignment::Center => Alignment::Center,
};
let (pre_pad, post_pad) = match align {
- rt::v1::Alignment::Left => (0, padding),
- rt::v1::Alignment::Right | rt::v1::Alignment::Unknown => (padding, 0),
- rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2),
+ Alignment::Left => (0, padding),
+ Alignment::Right => (padding, 0),
+ Alignment::Center => (padding / 2, (padding + 1) / 2),
};
for _ in 0..pre_pad {
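
A small stable-API sketch of what `padding` does for implementors: `Formatter::pad` applies the caller's fill, alignment, and width through this same helper.

    use std::fmt;

    struct Label(&'static str);

    impl fmt::Display for Label {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // `pad` honours width/precision/alignment via the `padding` helper above.
            f.pad(self.0)
        }
    }

    fn main() {
        assert_eq!(format!("{:*^7}", Label("hi")), "**hi***");
        assert_eq!(format!("{:<5}", Label("hi")), "hi   ");
    }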
@@ -1573,14 +1415,17 @@ impl<'a> Formatter<'a> {
/// Takes the formatted parts and applies the padding.
/// Assumes that the caller already has rendered the parts with required precision,
/// so that `self.precision` can be ignored.
- fn pad_formatted_parts(&mut self, formatted: &numfmt::Formatted<'_>) -> Result {
+ ///
+ /// # Safety
+ ///
+ /// Any `numfmt::Part::Copy` parts in `formatted` must contain valid UTF-8.
+ unsafe fn pad_formatted_parts(&mut self, formatted: &numfmt::Formatted<'_>) -> Result {
if let Some(mut width) = self.width {
// for the sign-aware zero padding, we render the sign first and
// behave as if we had no sign from the beginning.
let mut formatted = formatted.clone();
let old_fill = self.fill;
let old_align = self.align;
- let mut align = old_align;
if self.sign_aware_zero_pad() {
// a sign always goes first
let sign = formatted.sign;
@@ -1589,19 +1434,22 @@ impl<'a> Formatter<'a> {
// remove the sign from the formatted parts
formatted.sign = "";
width = width.saturating_sub(sign.len());
- align = rt::v1::Alignment::Right;
self.fill = '0';
- self.align = rt::v1::Alignment::Right;
+ self.align = rt::Alignment::Right;
}
// remaining parts go through the ordinary padding process.
let len = formatted.len();
let ret = if width <= len {
// no padding
- self.write_formatted_parts(&formatted)
+ // SAFETY: Per the precondition.
+ unsafe { self.write_formatted_parts(&formatted) }
} else {
- let post_padding = self.padding(width - len, align)?;
- self.write_formatted_parts(&formatted)?;
+ let post_padding = self.padding(width - len, Alignment::Right)?;
+ // SAFETY: Per the precondition.
+ unsafe {
+ self.write_formatted_parts(&formatted)?;
+ }
post_padding.write(self)
};
self.fill = old_fill;
@@ -1609,20 +1457,20 @@ impl<'a> Formatter<'a> {
ret
} else {
// this is the common case and we take a shortcut
- self.write_formatted_parts(formatted)
+ // SAFETY: Per the precondition.
+ unsafe { self.write_formatted_parts(formatted) }
}
}
- fn write_formatted_parts(&mut self, formatted: &numfmt::Formatted<'_>) -> Result {
- fn write_bytes(buf: &mut dyn Write, s: &[u8]) -> Result {
+ /// # Safety
+ ///
+ /// Any `numfmt::Part::Copy` parts in `formatted` must contain valid UTF-8.
+ unsafe fn write_formatted_parts(&mut self, formatted: &numfmt::Formatted<'_>) -> Result {
+ unsafe fn write_bytes(buf: &mut dyn Write, s: &[u8]) -> Result {
// SAFETY: This is used for `numfmt::Part::Num` and `numfmt::Part::Copy`.
// It's safe to use for `numfmt::Part::Num` since every char `c` is between
- // `b'0'` and `b'9'`, which means `s` is valid UTF-8.
- // It's also probably safe in practice to use for `numfmt::Part::Copy(buf)`
- // since `buf` should be plain ASCII, but it's possible for someone to pass
- // in a bad value for `buf` into `numfmt::to_shortest_str` since it is a
- // public function.
- // FIXME: Determine whether this could result in UB.
+ // `b'0'` and `b'9'`, which means `s` is valid UTF-8. It's safe to use for
+ // `numfmt::Part::Copy` due to this function's precondition.
buf.write_str(unsafe { str::from_utf8_unchecked(s) })
}
@@ -1649,11 +1497,15 @@ impl<'a> Formatter<'a> {
*c = b'0' + (v % 10) as u8;
v /= 10;
}
- write_bytes(self.buf, &s[..len])?;
+ // SAFETY: Per the precondition.
+ unsafe {
+ write_bytes(self.buf, &s[..len])?;
+ }
}
- numfmt::Part::Copy(buf) => {
+ // SAFETY: Per the precondition.
+ numfmt::Part::Copy(buf) => unsafe {
write_bytes(self.buf, buf)?;
- }
+ },
}
}
Ok(())
@@ -1670,7 +1522,7 @@ impl<'a> Formatter<'a> {
/// struct Foo;
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// formatter.write_str("Foo")
/// // This is equivalent to:
/// // write!(formatter, "Foo")
@@ -1695,7 +1547,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// formatter.write_fmt(format_args!("Foo {}", self.0))
/// }
/// }
@@ -1730,7 +1582,7 @@ impl<'a> Formatter<'a> {
/// struct Foo;
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let c = formatter.fill();
/// if let Some(width) = formatter.width() {
/// for _ in 0..width {
@@ -1758,14 +1610,12 @@ impl<'a> Formatter<'a> {
/// # Examples
///
/// ```
- /// extern crate core;
- ///
/// use std::fmt::{self, Alignment};
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// let s = if let Some(s) = formatter.align() {
/// match s {
/// Alignment::Left => "left",
@@ -1788,10 +1638,10 @@ impl<'a> Formatter<'a> {
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
pub fn align(&self) -> Option<Alignment> {
match self.align {
- rt::v1::Alignment::Left => Some(Alignment::Left),
- rt::v1::Alignment::Right => Some(Alignment::Right),
- rt::v1::Alignment::Center => Some(Alignment::Center),
- rt::v1::Alignment::Unknown => None,
+ rt::Alignment::Left => Some(Alignment::Left),
+ rt::Alignment::Right => Some(Alignment::Right),
+ rt::Alignment::Center => Some(Alignment::Center),
+ rt::Alignment::Unknown => None,
}
}
@@ -1805,7 +1655,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// if let Some(width) = formatter.width() {
/// // If we received a width, we use it
/// write!(formatter, "{:width$}", format!("Foo({})", self.0), width = width)
@@ -1836,7 +1686,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(f32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// if let Some(precision) = formatter.precision() {
/// // If we received a precision, we use it.
/// write!(formatter, "Foo({1:.*})", precision, self.0)
@@ -1866,7 +1716,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// if formatter.sign_plus() {
/// write!(formatter,
/// "Foo({}{})",
@@ -1885,7 +1735,7 @@ impl<'a> Formatter<'a> {
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_plus(&self) -> bool {
- self.flags & (1 << FlagV1::SignPlus as u32) != 0
+ self.flags & (1 << rt::Flag::SignPlus as u32) != 0
}
/// Determines if the `-` flag was specified.
@@ -1898,7 +1748,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// if formatter.sign_minus() {
/// // You want a minus sign? Have one!
/// write!(formatter, "-Foo({})", self.0)
@@ -1914,7 +1764,7 @@ impl<'a> Formatter<'a> {
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_minus(&self) -> bool {
- self.flags & (1 << FlagV1::SignMinus as u32) != 0
+ self.flags & (1 << rt::Flag::SignMinus as u32) != 0
}
/// Determines if the `#` flag was specified.
@@ -1927,7 +1777,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// if formatter.alternate() {
/// write!(formatter, "Foo({})", self.0)
/// } else {
@@ -1942,7 +1792,7 @@ impl<'a> Formatter<'a> {
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn alternate(&self) -> bool {
- self.flags & (1 << FlagV1::Alternate as u32) != 0
+ self.flags & (1 << rt::Flag::Alternate as u32) != 0
}
/// Determines if the `0` flag was specified.
@@ -1955,7 +1805,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
- /// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// assert!(formatter.sign_aware_zero_pad());
/// assert_eq!(formatter.width(), Some(4));
/// // We ignore the formatter's options.
@@ -1968,17 +1818,17 @@ impl<'a> Formatter<'a> {
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_aware_zero_pad(&self) -> bool {
- self.flags & (1 << FlagV1::SignAwareZeroPad as u32) != 0
+ self.flags & (1 << rt::Flag::SignAwareZeroPad as u32) != 0
}
// FIXME: Decide what public API we want for these two flags.
// https://github.com/rust-lang/rust/issues/48584
fn debug_lower_hex(&self) -> bool {
- self.flags & (1 << FlagV1::DebugLowerHex as u32) != 0
+ self.flags & (1 << rt::Flag::DebugLowerHex as u32) != 0
}
fn debug_upper_hex(&self) -> bool {
- self.flags & (1 << FlagV1::DebugUpperHex as u32) != 0
+ self.flags & (1 << rt::Flag::DebugUpperHex as u32) != 0
}
/// Creates a [`DebugStruct`] builder designed to assist with creation of
@@ -1999,7 +1849,7 @@ impl<'a> Formatter<'a> {
/// }
///
/// impl fmt::Debug for Foo {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_struct("Foo")
/// .field("bar", &self.bar)
/// .field("baz", &self.baz)
@@ -2157,7 +2007,7 @@ impl<'a> Formatter<'a> {
/// struct Foo<T>(i32, String, PhantomData<T>);
///
/// impl<T> fmt::Debug for Foo<T> {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_tuple("Foo")
/// .field(&self.0)
/// .field(&self.1)
@@ -2289,7 +2139,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_list().entries(self.0.iter()).finish()
/// }
/// }
@@ -2312,7 +2162,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_set().entries(self.0.iter()).finish()
/// }
/// }
@@ -2328,14 +2178,14 @@ impl<'a> Formatter<'a> {
/// ```rust
/// use std::fmt;
///
- /// struct Arm<'a, L: 'a, R: 'a>(&'a (L, R));
- /// struct Table<'a, K: 'a, V: 'a>(&'a [(K, V)], V);
+ /// struct Arm<'a, L, R>(&'a (L, R));
+ /// struct Table<'a, K, V>(&'a [(K, V)], V);
///
/// impl<'a, L, R> fmt::Debug for Arm<'a, L, R>
/// where
/// L: 'a + fmt::Debug, R: 'a + fmt::Debug
/// {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// L::fmt(&(self.0).0, fmt)?;
/// fmt.write_str(" => ")?;
/// R::fmt(&(self.0).1, fmt)
@@ -2346,7 +2196,7 @@ impl<'a> Formatter<'a> {
/// where
/// K: 'a + fmt::Debug, V: 'a + fmt::Debug
/// {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_set()
/// .entries(self.0.iter().map(Arm))
/// .entry(&Arm(&(format_args!("_"), &self.1)))
@@ -2370,7 +2220,7 @@ impl<'a> Formatter<'a> {
/// struct Foo(Vec<(String, i32)>);
///
/// impl fmt::Debug for Foo {
- /// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ /// fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// fmt.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (k, v))).finish()
/// }
/// }
@@ -2429,6 +2279,7 @@ fmt_refs! { Debug, Display, Octal, Binary, LowerHex, UpperHex, LowerExp, UpperEx
#[unstable(feature = "never_type", issue = "35121")]
impl Debug for ! {
+ #[inline]
fn fmt(&self, _: &mut Formatter<'_>) -> Result {
*self
}
@@ -2436,6 +2287,7 @@ impl Debug for ! {
#[unstable(feature = "never_type", issue = "35121")]
impl Display for ! {
+ #[inline]
fn fmt(&self, _: &mut Formatter<'_>) -> Result {
*self
}
@@ -2538,13 +2390,13 @@ pub(crate) fn pointer_fmt_inner(ptr_addr: usize, f: &mut Formatter<'_>) -> Resul
// or not to zero extend, and then unconditionally set it to get the
// prefix.
if f.alternate() {
- f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32);
+ f.flags |= 1 << (rt::Flag::SignAwareZeroPad as u32);
if f.width.is_none() {
f.width = Some((usize::BITS / 4) as usize + 2);
}
}
- f.flags |= 1 << (FlagV1::Alternate as u32);
+ f.flags |= 1 << (rt::Flag::Alternate as u32);
let ret = LowerHex::fmt(&ptr_addr, f);
diff --git a/library/core/src/fmt/num.rs b/library/core/src/fmt/num.rs
index d8365ae9b..4f42f73eb 100644
--- a/library/core/src/fmt/num.rs
+++ b/library/core/src/fmt/num.rs
@@ -52,8 +52,12 @@ impl_int! { i8 i16 i32 i64 i128 isize }
impl_uint! { u8 u16 u32 u64 u128 usize }
/// A type that represents a specific radix
+///
+/// # Safety
+///
+/// `digit` must return an ASCII character.
#[doc(hidden)]
-trait GenericRadix: Sized {
+unsafe trait GenericRadix: Sized {
/// The number of digits.
const BASE: u8;
@@ -129,7 +133,7 @@ struct UpperHex;
macro_rules! radix {
($T:ident, $base:expr, $prefix:expr, $($x:pat => $conv:expr),+) => {
- impl GenericRadix for $T {
+ unsafe impl GenericRadix for $T {
const BASE: u8 = $base;
const PREFIX: &'static str = $prefix;
fn digit(x: u8) -> u8 {
@@ -407,7 +411,7 @@ macro_rules! impl_Exp {
let parts = &[
numfmt::Part::Copy(buf_slice),
numfmt::Part::Zero(added_precision),
- numfmt::Part::Copy(exp_slice)
+ numfmt::Part::Copy(exp_slice),
];
let sign = if !is_nonnegative {
"-"
@@ -416,8 +420,9 @@ macro_rules! impl_Exp {
} else {
""
};
- let formatted = numfmt::Formatted{sign, parts};
- f.pad_formatted_parts(&formatted)
+ let formatted = numfmt::Formatted { sign, parts };
+ // SAFETY: `buf_slice` and `exp_slice` contain only ASCII characters.
+ unsafe { f.pad_formatted_parts(&formatted) }
}
$(
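
The parts assembled here are ASCII-only, which is what the new `pad_formatted_parts` precondition relies on; at the surface this is ordinary `{:e}`/`{:E}` formatting (illustrative):

    fn main() {
        assert_eq!(format!("{:e}", 1234.5_f64), "1.2345e3");
        assert_eq!(format!("{:.1e}", 1234.5_f64), "1.2e3");
        assert_eq!(format!("{:E}", 0.00123_f64), "1.23E-3");
    }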
diff --git a/library/core/src/fmt/rt.rs b/library/core/src/fmt/rt.rs
new file mode 100644
index 000000000..d37888c27
--- /dev/null
+++ b/library/core/src/fmt/rt.rs
@@ -0,0 +1,212 @@
+#![allow(missing_debug_implementations)]
+#![unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
+
+//! These are the lang items used by format_args!().
+
+use super::*;
+
+#[lang = "format_placeholder"]
+#[derive(Copy, Clone)]
+pub struct Placeholder {
+ pub position: usize,
+ pub fill: char,
+ pub align: Alignment,
+ pub flags: u32,
+ pub precision: Count,
+ pub width: Count,
+}
+
+impl Placeholder {
+ #[inline(always)]
+ pub const fn new(
+ position: usize,
+ fill: char,
+ align: Alignment,
+ flags: u32,
+ precision: Count,
+ width: Count,
+ ) -> Self {
+ Self { position, fill, align, flags, precision, width }
+ }
+}
+
+#[lang = "format_alignment"]
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum Alignment {
+ Left,
+ Right,
+ Center,
+ Unknown,
+}
+
+/// Used by [width](https://doc.rust-lang.org/std/fmt/#width)
+/// and [precision](https://doc.rust-lang.org/std/fmt/#precision) specifiers.
+#[lang = "format_count"]
+#[derive(Copy, Clone)]
+pub enum Count {
+ /// Specified with a literal number, stores the value
+ Is(usize),
+ /// Specified using `$` and `*` syntaxes, stores the index into `args`
+ Param(usize),
+ /// Not specified
+ Implied,
+}
+
+// This needs to match the order of flags in compiler/rustc_ast_lowering/src/format.rs.
+#[derive(Copy, Clone)]
+pub(super) enum Flag {
+ SignPlus,
+ SignMinus,
+ Alternate,
+ SignAwareZeroPad,
+ DebugLowerHex,
+ DebugUpperHex,
+}
+
+/// This struct represents the generic "argument" which is taken by format_args!().
+/// It contains a function to format the given value. At compile time it is ensured that the
+/// function and the value have the correct types, and then this struct is used to canonicalize
+/// arguments to one type.
+///
+/// Argument is essentially an optimized partially applied formatting function,
+/// equivalent to `exists T.(&T, fn(&T, &mut Formatter<'_>) -> Result)`.
+#[lang = "format_argument"]
+#[derive(Copy, Clone)]
+pub struct Argument<'a> {
+ value: &'a Opaque,
+ formatter: fn(&Opaque, &mut Formatter<'_>) -> Result,
+}
+
+#[rustc_diagnostic_item = "ArgumentMethods"]
+impl<'a> Argument<'a> {
+ #[inline(always)]
+ fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> Argument<'b> {
+ // SAFETY: `mem::transmute(x)` is safe because
+ // 1. `&'b T` keeps the lifetime it originated with `'b`
+ // (so as to not have an unbounded lifetime)
+ // 2. `&'b T` and `&'b Opaque` have the same memory layout
+ // (when `T` is `Sized`, as it is here)
+ // `mem::transmute(f)` is safe since `fn(&T, &mut Formatter<'_>) -> Result`
+ // and `fn(&Opaque, &mut Formatter<'_>) -> Result` have the same ABI
+ // (as long as `T` is `Sized`)
+ unsafe { Argument { formatter: mem::transmute(f), value: mem::transmute(x) } }
+ }
+
+ #[inline(always)]
+ pub fn new_display<'b, T: Display>(x: &'b T) -> Argument<'_> {
+ Self::new(x, Display::fmt)
+ }
+ #[inline(always)]
+ pub fn new_debug<'b, T: Debug>(x: &'b T) -> Argument<'_> {
+ Self::new(x, Debug::fmt)
+ }
+ #[inline(always)]
+ pub fn new_octal<'b, T: Octal>(x: &'b T) -> Argument<'_> {
+ Self::new(x, Octal::fmt)
+ }
+ #[inline(always)]
+ pub fn new_lower_hex<'b, T: LowerHex>(x: &'b T) -> Argument<'_> {
+ Self::new(x, LowerHex::fmt)
+ }
+ #[inline(always)]
+ pub fn new_upper_hex<'b, T: UpperHex>(x: &'b T) -> Argument<'_> {
+ Self::new(x, UpperHex::fmt)
+ }
+ #[inline(always)]
+ pub fn new_pointer<'b, T: Pointer>(x: &'b T) -> Argument<'_> {
+ Self::new(x, Pointer::fmt)
+ }
+ #[inline(always)]
+ pub fn new_binary<'b, T: Binary>(x: &'b T) -> Argument<'_> {
+ Self::new(x, Binary::fmt)
+ }
+ #[inline(always)]
+ pub fn new_lower_exp<'b, T: LowerExp>(x: &'b T) -> Argument<'_> {
+ Self::new(x, LowerExp::fmt)
+ }
+ #[inline(always)]
+ pub fn new_upper_exp<'b, T: UpperExp>(x: &'b T) -> Argument<'_> {
+ Self::new(x, UpperExp::fmt)
+ }
+ #[inline(always)]
+ pub fn from_usize(x: &usize) -> Argument<'_> {
+ Self::new(x, USIZE_MARKER)
+ }
+
+ #[inline(always)]
+ pub(super) fn fmt(&self, f: &mut Formatter<'_>) -> Result {
+ (self.formatter)(self.value, f)
+ }
+
+ #[inline(always)]
+ pub(super) fn as_usize(&self) -> Option<usize> {
+ // We are type punning a bit here: USIZE_MARKER only takes an &usize but
+ // formatter takes an &Opaque. Rust understandably doesn't think we should compare
+ // the function pointers if they don't have the same signature, so we cast to
+ // usizes to tell it that we just want to compare addresses.
+ if self.formatter as usize == USIZE_MARKER as usize {
+ // SAFETY: The `formatter` field is only set to USIZE_MARKER if
+ // the value is a usize, so this is safe
+ Some(unsafe { *(self.value as *const _ as *const usize) })
+ } else {
+ None
+ }
+ }
+
+ /// Used by `format_args` when all arguments are gone after inlining,
+ /// when using `&[]` would incorrectly allow for a bigger lifetime.
+ ///
+ /// This fails without format argument inlining, and that shouldn't be different
+ /// when the argument is inlined:
+ ///
+ /// ```compile_fail,E0716
+ /// let f = format_args!("{}", "a");
+ /// println!("{f}");
+ /// ```
+ #[inline(always)]
+ pub fn none() -> [Self; 0] {
+ []
+ }
+}
+
+/// This struct represents the unsafety of constructing an `Arguments`.
+/// It exists, rather than an unsafe function, in order to simplify the expansion
+/// of `format_args!(..)` and reduce the scope of the `unsafe` block.
+#[lang = "format_unsafe_arg"]
+pub struct UnsafeArg {
+ _private: (),
+}
+
+impl UnsafeArg {
+ /// See documentation where `UnsafeArg` is required to know when it is safe to
+ /// create and use `UnsafeArg`.
+ #[inline(always)]
+ pub unsafe fn new() -> Self {
+ Self { _private: () }
+ }
+}
+
+extern "C" {
+ type Opaque;
+}
+
+// This guarantees a single stable value for the function pointer associated with
+// indices/counts in the formatting infrastructure.
+//
+// Note that a function defined as such would not be correct as functions are
+// always tagged unnamed_addr with the current lowering to LLVM IR, so their
+// address is not considered important to LLVM and as such the as_usize cast
+// could have been miscompiled. In practice, we never call as_usize on non-usize
+// containing data (as a matter of static generation of the formatting
+// arguments), so this is merely an additional check.
+//
+// We primarily want to ensure that the function pointer at `USIZE_MARKER` has
+// an address corresponding *only* to functions that also take `&usize` as their
+// first argument. The read_volatile here ensures that we can safely read out a
+// usize from the passed reference and that this address does not point at a
+// non-usize taking function.
+static USIZE_MARKER: fn(&usize, &mut Formatter<'_>) -> Result = |ptr, _| {
+ // SAFETY: ptr is a reference
+ let _v: usize = unsafe { crate::ptr::read_volatile(ptr) };
+ loop {}
+};
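
For orientation (stable API only, not part of the patch): every `format_args!` argument is erased into one of these `rt::Argument` values, which is why a single `Arguments` can carry an arbitrary mix of types.

    use std::fmt::Arguments;

    fn log(args: Arguments<'_>) {
        // `Arguments` implements `Display`, so the erased arguments can be
        // rendered anywhere, regardless of their original types.
        println!("{args}");
    }

    fn main() {
        log(format_args!("{} items, {:.2} MiB, {:?}", 3_u32, 1.5_f64, "cache"));
    }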
diff --git a/library/core/src/fmt/rt/v1.rs b/library/core/src/fmt/rt/v1.rs
deleted file mode 100644
index 6d70796f7..000000000
--- a/library/core/src/fmt/rt/v1.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-//! This is an internal module used by the ifmt! runtime. These structures are
-//! emitted to static arrays to precompile format strings ahead of time.
-//!
-//! These definitions are similar to their `ct` equivalents, but differ in that
-//! these can be statically allocated and are slightly optimized for the runtime
-#![allow(missing_debug_implementations)]
-
-#[lang = "format_placeholder"]
-#[derive(Copy, Clone)]
-// FIXME: Rename this to Placeholder
-pub struct Argument {
- pub position: usize,
- pub format: FormatSpec,
-}
-
-#[derive(Copy, Clone)]
-pub struct FormatSpec {
- pub fill: char,
- pub align: Alignment,
- pub flags: u32,
- pub precision: Count,
- pub width: Count,
-}
-
-impl Argument {
- #[inline(always)]
- pub const fn new(
- position: usize,
- fill: char,
- align: Alignment,
- flags: u32,
- precision: Count,
- width: Count,
- ) -> Self {
- Self { position, format: FormatSpec { fill, align, flags, precision, width } }
- }
-}
-
-/// Possible alignments that can be requested as part of a formatting directive.
-#[lang = "format_alignment"]
-#[derive(Copy, Clone, PartialEq, Eq)]
-pub enum Alignment {
- /// Indication that contents should be left-aligned.
- Left,
- /// Indication that contents should be right-aligned.
- Right,
- /// Indication that contents should be center-aligned.
- Center,
- /// No alignment was requested.
- Unknown,
-}
-
-/// Used by [width](https://doc.rust-lang.org/std/fmt/#width) and [precision](https://doc.rust-lang.org/std/fmt/#precision) specifiers.
-#[lang = "format_count"]
-#[derive(Copy, Clone)]
-pub enum Count {
- /// Specified with a literal number, stores the value
- Is(usize),
- /// Specified using `$` and `*` syntaxes, stores the index into `args`
- Param(usize),
- /// Not specified
- Implied,
-}
diff --git a/library/core/src/future/into_future.rs b/library/core/src/future/into_future.rs
index 649b43387..38c654e76 100644
--- a/library/core/src/future/into_future.rs
+++ b/library/core/src/future/into_future.rs
@@ -99,6 +99,7 @@ use crate::future::Future;
/// }
/// ```
#[stable(feature = "into_future", since = "1.64.0")]
+#[rustc_diagnostic_item = "IntoFuture"]
pub trait IntoFuture {
/// The output that the future will produce on completion.
#[stable(feature = "into_future", since = "1.64.0")]
diff --git a/library/core/src/future/join.rs b/library/core/src/future/join.rs
index 35f0dea06..3f35179dd 100644
--- a/library/core/src/future/join.rs
+++ b/library/core/src/future/join.rs
@@ -4,7 +4,7 @@ use crate::cell::UnsafeCell;
use crate::future::{poll_fn, Future};
use crate::mem;
use crate::pin::Pin;
-use crate::task::{Context, Poll};
+use crate::task::{ready, Context, Poll};
/// Polls multiple futures simultaneously, returning a tuple
/// of all results once complete.
@@ -118,7 +118,7 @@ macro join_internal {
fut
})
};
- // Despite how tempting it may be to `let () = fut.poll(cx).ready()?;`
+ // Despite how tempting it may be to `let () = ready!(fut.poll(cx));`
// doing so would defeat the point of `join!`: to start polling eagerly all
// of the futures, to allow parallelizing the waits.
done &= fut.poll(cx).is_ready();
@@ -180,7 +180,7 @@ impl<F: Future> Future for MaybeDone<F> {
// Do not mix match ergonomics with unsafe.
match *self.as_mut().get_unchecked_mut() {
MaybeDone::Future(ref mut f) => {
- let val = Pin::new_unchecked(f).poll(cx).ready()?;
+ let val = ready!(Pin::new_unchecked(f).poll(cx));
self.set(Self::Done(val));
}
MaybeDone::Done(_) => {}
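
A minimal sketch of the trade-off described in that comment (names are illustrative): `ready!` short-circuits with `Poll::Pending`, which is fine for a sequential combinator but would stop `join!` from polling every future eagerly.

    use std::future::Future;
    use std::pin::Pin;
    use std::task::{ready, Context, Poll};

    fn poll_in_sequence<A: Future, B: Future>(
        a: Pin<&mut A>,
        b: Pin<&mut B>,
        cx: &mut Context<'_>,
    ) -> Poll<(A::Output, B::Output)> {
        let x = ready!(a.poll(cx)); // returns Pending here if `a` is not done,
        let y = ready!(b.poll(cx)); // so `b` is not even polled yet
        Poll::Ready((x, y))
    }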
diff --git a/library/core/src/future/mod.rs b/library/core/src/future/mod.rs
index 04f02d47f..089493d37 100644
--- a/library/core/src/future/mod.rs
+++ b/library/core/src/future/mod.rs
@@ -66,11 +66,3 @@ pub unsafe fn get_context<'a, 'b>(cx: ResumeTy) -> &'a mut Context<'b> {
// that fulfills all the requirements for a mutable reference.
unsafe { &mut *cx.0.as_ptr().cast() }
}
-
-#[doc(hidden)]
-#[unstable(feature = "gen_future", issue = "50547")]
-#[inline]
-#[cfg_attr(bootstrap, lang = "identity_future")]
-pub const fn identity_future<O, Fut: Future<Output = O>>(f: Fut) -> Fut {
- f
-}
diff --git a/library/core/src/hash/mod.rs b/library/core/src/hash/mod.rs
index 4e7bae7bc..794a57f09 100644
--- a/library/core/src/hash/mod.rs
+++ b/library/core/src/hash/mod.rs
@@ -86,8 +86,7 @@
#![stable(feature = "rust1", since = "1.0.0")]
use crate::fmt;
-use crate::intrinsics::const_eval_select;
-use crate::marker::{self, Destruct};
+use crate::marker;
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
@@ -184,7 +183,6 @@ mod sip;
/// [impl]: ../../std/primitive.str.html#impl-Hash-for-str
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Hash"]
-#[const_trait]
pub trait Hash {
/// Feeds this value into the given [`Hasher`].
///
@@ -199,7 +197,7 @@ pub trait Hash {
/// println!("Hash is {:x}!", hasher.finish());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- fn hash<H: ~const Hasher>(&self, state: &mut H);
+ fn hash<H: Hasher>(&self, state: &mut H);
/// Feeds a slice of this type into the given [`Hasher`].
///
@@ -236,25 +234,13 @@ pub trait Hash {
/// [`hash`]: Hash::hash
/// [`hash_slice`]: Hash::hash_slice
#[stable(feature = "hash_slice", since = "1.3.0")]
- fn hash_slice<H: ~const Hasher>(data: &[Self], state: &mut H)
+ fn hash_slice<H: Hasher>(data: &[Self], state: &mut H)
where
Self: Sized,
{
- //FIXME(const_trait_impl): revert to only a for loop
- fn rt<T: Hash, H: Hasher>(data: &[T], state: &mut H) {
- for piece in data {
- piece.hash(state)
- }
- }
- const fn ct<T: ~const Hash, H: ~const Hasher>(data: &[T], state: &mut H) {
- let mut i = 0;
- while i < data.len() {
- data[i].hash(state);
- i += 1;
- }
+ for piece in data {
+ piece.hash(state)
}
- // SAFETY: same behavior, CT just uses while instead of for
- unsafe { const_eval_select((data, state), ct, rt) };
}
}
@@ -327,7 +313,6 @@ pub use macros::Hash;
/// [`write_u8`]: Hasher::write_u8
/// [`write_u32`]: Hasher::write_u32
#[stable(feature = "rust1", since = "1.0.0")]
-#[const_trait]
pub trait Hasher {
/// Returns the hash value for the values written so far.
///
@@ -573,8 +558,7 @@ pub trait Hasher {
}
#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
-#[rustc_const_unstable(feature = "const_hash", issue = "104061")]
-impl<H: ~const Hasher + ?Sized> const Hasher for &mut H {
+impl<H: Hasher + ?Sized> Hasher for &mut H {
fn finish(&self) -> u64 {
(**self).finish()
}
@@ -654,7 +638,6 @@ impl<H: ~const Hasher + ?Sized> const Hasher for &mut H {
/// [`build_hasher`]: BuildHasher::build_hasher
/// [`HashMap`]: ../../std/collections/struct.HashMap.html
#[stable(since = "1.7.0", feature = "build_hasher")]
-#[const_trait]
pub trait BuildHasher {
/// Type of the hasher that will be created.
#[stable(since = "1.7.0", feature = "build_hasher")]
@@ -691,8 +674,6 @@ pub trait BuildHasher {
/// # Example
///
/// ```
- /// #![feature(build_hasher_simple_hash_one)]
- ///
/// use std::cmp::{max, min};
/// use std::hash::{BuildHasher, Hash, Hasher};
/// struct OrderAmbivalentPair<T: Ord>(T, T);
@@ -714,11 +695,11 @@ pub trait BuildHasher {
/// bh.hash_one(&OrderAmbivalentPair(2, 10))
/// );
/// ```
- #[unstable(feature = "build_hasher_simple_hash_one", issue = "86161")]
- fn hash_one<T: ~const Hash + ~const Destruct>(&self, x: T) -> u64
+ #[stable(feature = "build_hasher_simple_hash_one", since = "1.71.0")]
+ fn hash_one<T: Hash>(&self, x: T) -> u64
where
Self: Sized,
- Self::Hasher: ~const Hasher + ~const Destruct,
+ Self::Hasher: Hasher,
{
let mut hasher = self.build_hasher();
x.hash(&mut hasher);
@@ -782,8 +763,7 @@ impl<H> fmt::Debug for BuildHasherDefault<H> {
}
#[stable(since = "1.7.0", feature = "build_hasher")]
-#[rustc_const_unstable(feature = "const_hash", issue = "104061")]
-impl<H: ~const Default + Hasher> const BuildHasher for BuildHasherDefault<H> {
+impl<H: Default + Hasher> BuildHasher for BuildHasherDefault<H> {
type Hasher = H;
fn build_hasher(&self) -> H {
@@ -799,8 +779,7 @@ impl<H> Clone for BuildHasherDefault<H> {
}
#[stable(since = "1.7.0", feature = "build_hasher")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<H> const Default for BuildHasherDefault<H> {
+impl<H> Default for BuildHasherDefault<H> {
fn default() -> BuildHasherDefault<H> {
BuildHasherDefault(marker::PhantomData)
}
@@ -825,15 +804,14 @@ mod impls {
macro_rules! impl_write {
($(($ty:ident, $meth:ident),)*) => {$(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl const Hash for $ty {
+ impl Hash for $ty {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
state.$meth(*self)
}
#[inline]
- fn hash_slice<H: ~const Hasher>(data: &[$ty], state: &mut H) {
+ fn hash_slice<H: Hasher>(data: &[$ty], state: &mut H) {
let newlen = mem::size_of_val(data);
let ptr = data.as_ptr() as *const u8;
// SAFETY: `ptr` is valid and aligned, as this macro is only used
@@ -862,37 +840,33 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl const Hash for bool {
+ impl Hash for bool {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u8(*self as u8)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl const Hash for char {
+ impl Hash for char {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u32(*self as u32)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl const Hash for str {
+ impl Hash for str {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
state.write_str(self);
}
}
#[stable(feature = "never_hash", since = "1.29.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl const Hash for ! {
+ impl Hash for ! {
#[inline]
- fn hash<H: ~const Hasher>(&self, _: &mut H) {
+ fn hash<H: Hasher>(&self, _: &mut H) {
*self
}
}
@@ -900,10 +874,9 @@ mod impls {
macro_rules! impl_hash_tuple {
() => (
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl const Hash for () {
+ impl Hash for () {
#[inline]
- fn hash<H: ~const Hasher>(&self, _state: &mut H) {}
+ fn hash<H: Hasher>(&self, _state: &mut H) {}
}
);
@@ -911,11 +884,10 @@ mod impls {
maybe_tuple_doc! {
$($name)+ @
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl<$($name: ~const Hash),+> const Hash for ($($name,)+) where last_type!($($name,)+): ?Sized {
+ impl<$($name: Hash),+> Hash for ($($name,)+) where last_type!($($name,)+): ?Sized {
#[allow(non_snake_case)]
#[inline]
- fn hash<S: ~const Hasher>(&self, state: &mut S) {
+ fn hash<S: Hasher>(&self, state: &mut S) {
let ($(ref $name,)+) = *self;
$($name.hash(state);)+
}
@@ -958,29 +930,26 @@ mod impls {
impl_hash_tuple! { T B C D E F G H I J K L }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl<T: ~const Hash> const Hash for [T] {
+ impl<T: Hash> Hash for [T] {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
state.write_length_prefix(self.len());
Hash::hash_slice(self, state)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl<T: ?Sized + ~const Hash> const Hash for &T {
+ impl<T: ?Sized + Hash> Hash for &T {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_hash", issue = "104061")]
- impl<T: ?Sized + ~const Hash> const Hash for &mut T {
+ impl<T: ?Sized + Hash> Hash for &mut T {
#[inline]
- fn hash<H: ~const Hasher>(&self, state: &mut H) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
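
With the `~const` machinery removed, these are ordinary trait impls again; a short stable-API sketch (type name is illustrative) of a manual `Hash` impl together with the now-stable `hash_one`:

    use std::collections::hash_map::RandomState;
    use std::hash::{BuildHasher, Hash, Hasher};

    struct Rgb([u8; 3]);

    impl Hash for Rgb {
        fn hash<H: Hasher>(&self, state: &mut H) {
            // Forwards to the built-in array impl, which feeds the hasher as above.
            self.0.hash(state);
        }
    }

    fn main() {
        let s = RandomState::new();
        // `hash_one` is stable as of 1.71, matching the attribute change above.
        assert_eq!(s.hash_one(Rgb([1, 2, 3])), s.hash_one(Rgb([1, 2, 3])));
    }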
diff --git a/library/core/src/hash/sip.rs b/library/core/src/hash/sip.rs
index 7f8287bf5..6b9f2e842 100644
--- a/library/core/src/hash/sip.rs
+++ b/library/core/src/hash/sip.rs
@@ -118,7 +118,7 @@ macro_rules! load_int_le {
/// Safety: this performs unchecked indexing of `buf` at `start..start+len`, so
/// that must be in-bounds.
#[inline]
-const unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
+unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
debug_assert!(len < 8);
let mut i = 0; // current byte index (from LSB) in the output u64
let mut out = 0;
@@ -225,8 +225,7 @@ impl<S: Sip> Hasher<S> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_hash", issue = "104061")]
-impl const super::Hasher for SipHasher {
+impl super::Hasher for SipHasher {
#[inline]
fn write(&mut self, msg: &[u8]) {
self.0.hasher.write(msg)
@@ -244,8 +243,7 @@ impl const super::Hasher for SipHasher {
}
#[unstable(feature = "hashmap_internals", issue = "none")]
-#[rustc_const_unstable(feature = "const_hash", issue = "104061")]
-impl const super::Hasher for SipHasher13 {
+impl super::Hasher for SipHasher13 {
#[inline]
fn write(&mut self, msg: &[u8]) {
self.hasher.write(msg)
@@ -262,7 +260,7 @@ impl const super::Hasher for SipHasher13 {
}
}
-impl<S: ~const Sip> const super::Hasher for Hasher<S> {
+impl<S: Sip> super::Hasher for Hasher<S> {
// Note: no integer hashing methods (`write_u*`, `write_i*`) are defined
// for this type. We could add them, copy the `short_write` implementation
// in librustc_data_structures/sip128.rs, and add `write_u*`/`write_i*`
@@ -342,7 +340,7 @@ impl<S: ~const Sip> const super::Hasher for Hasher<S> {
}
}
-impl<S: Sip> const Clone for Hasher<S> {
+impl<S: Sip> Clone for Hasher<S> {
#[inline]
fn clone(&self) -> Hasher<S> {
Hasher {
@@ -366,7 +364,6 @@ impl<S: Sip> Default for Hasher<S> {
}
#[doc(hidden)]
-#[const_trait]
trait Sip {
fn c_rounds(_: &mut State);
fn d_rounds(_: &mut State);
@@ -375,7 +372,7 @@ trait Sip {
#[derive(Debug, Clone, Default)]
struct Sip13Rounds;
-impl const Sip for Sip13Rounds {
+impl Sip for Sip13Rounds {
#[inline]
fn c_rounds(state: &mut State) {
compress!(state);
@@ -392,7 +389,7 @@ impl const Sip for Sip13Rounds {
#[derive(Debug, Clone, Default)]
struct Sip24Rounds;
-impl const Sip for Sip24Rounds {
+impl Sip for Sip24Rounds {
#[inline]
fn c_rounds(state: &mut State) {
compress!(state);
diff --git a/library/core/src/hint.rs b/library/core/src/hint.rs
index a20556577..75c104ce2 100644
--- a/library/core/src/hint.rs
+++ b/library/core/src/hint.rs
@@ -73,8 +73,8 @@ use crate::intrinsics;
/// ```
///
/// While using `unreachable_unchecked()` is perfectly sound in the following
-/// example, the compiler is able to prove that a division by zero is not
-/// possible. Benchmarking reveals that `unreachable_unchecked()` provides
+/// example, as the compiler is able to prove that a division by zero is not
+/// possible, benchmarking reveals that `unreachable_unchecked()` provides
/// no benefit over using [`unreachable!`], while the latter does not introduce
/// the possibility of Undefined Behavior.
///
@@ -217,17 +217,14 @@ pub fn spin_loop() {
/// Note however, that `black_box` is only (and can only be) provided on a "best-effort" basis. The
/// extent to which it can block optimisations may vary depending upon the platform and code-gen
/// backend used. Programs cannot rely on `black_box` for *correctness*, beyond it behaving as the
-/// identity function.
+/// identity function. As such, it **must not be relied upon to control critical program behavior.**
+/// This _immediately_ precludes any direct use of this function for cryptographic or security
+/// purposes.
///
/// [`std::convert::identity`]: crate::convert::identity
///
/// # When is this useful?
///
-/// First and foremost: `black_box` does _not_ guarantee any exact behavior and, in some cases, may
-/// do nothing at all. As such, it **must not be relied upon to control critical program behavior.**
-/// This _immediately_ precludes any direct use of this function for cryptographic or security
-/// purposes.
-///
/// While not suitable in those mission-critical cases, `black_box`'s functionality can generally be
/// relied upon for benchmarking, and should be used there. It will try to ensure that the
/// compiler doesn't optimize away part of the intended test code based on context. For
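
A hedged benchmarking sketch of the intended use (best-effort only, as stressed above, and not suitable for security-sensitive code):

    use std::hint::black_box;
    use std::time::Instant;

    fn sum_of_squares(n: u64) -> u64 {
        (1..=n).map(|i| i * i).sum()
    }

    fn main() {
        let start = Instant::now();
        for _ in 0..1_000 {
            // `black_box` keeps the input from being const-folded and the unused
            // result from being discarded, on a best-effort basis only.
            black_box(sum_of_squares(black_box(10_000)));
        }
        println!("elapsed: {:?}", start.elapsed());
    }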
diff --git a/library/core/src/internal_macros.rs b/library/core/src/internal_macros.rs
index 5d4c9ba73..5774107f5 100644
--- a/library/core/src/internal_macros.rs
+++ b/library/core/src/internal_macros.rs
@@ -1,23 +1,10 @@
// implements the unary operator "op &T"
// based on "op T" where T is expected to be `Copy`able
macro_rules! forward_ref_unop {
- (impl const $imp:ident, $method:ident for $t:ty) => {
- forward_ref_unop!(impl const $imp, $method for $t,
+ (impl $imp:ident, $method:ident for $t:ty) => {
+ forward_ref_unop!(impl $imp, $method for $t,
#[stable(feature = "rust1", since = "1.0.0")]);
};
- // Equivalent to the non-const version, with the addition of `rustc_const_unstable`
- (impl const $imp:ident, $method:ident for $t:ty, #[$attr:meta]) => {
- #[$attr]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const $imp for &$t {
- type Output = <$t as $imp>::Output;
-
- #[inline]
- fn $method(self) -> <$t as $imp>::Output {
- $imp::$method(*self)
- }
- }
- };
(impl $imp:ident, $method:ident for $t:ty, #[$attr:meta]) => {
#[$attr]
impl $imp for &$t {
@@ -34,45 +21,10 @@ macro_rules! forward_ref_unop {
// implements binary operators "&T op U", "T op &U", "&T op &U"
// based on "T op U" where T and U are expected to be `Copy`able
macro_rules! forward_ref_binop {
- (impl const $imp:ident, $method:ident for $t:ty, $u:ty) => {
- forward_ref_binop!(impl const $imp, $method for $t, $u,
+ (impl $imp:ident, $method:ident for $t:ty, $u:ty) => {
+ forward_ref_binop!(impl $imp, $method for $t, $u,
#[stable(feature = "rust1", since = "1.0.0")]);
};
- // Equivalent to the non-const version, with the addition of `rustc_const_unstable`
- (impl const $imp:ident, $method:ident for $t:ty, $u:ty, #[$attr:meta]) => {
- #[$attr]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl<'a> const $imp<$u> for &'a $t {
- type Output = <$t as $imp<$u>>::Output;
-
- #[inline]
- fn $method(self, other: $u) -> <$t as $imp<$u>>::Output {
- $imp::$method(*self, other)
- }
- }
-
- #[$attr]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const $imp<&$u> for $t {
- type Output = <$t as $imp<$u>>::Output;
-
- #[inline]
- fn $method(self, other: &$u) -> <$t as $imp<$u>>::Output {
- $imp::$method(self, *other)
- }
- }
-
- #[$attr]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const $imp<&$u> for &$t {
- type Output = <$t as $imp<$u>>::Output;
-
- #[inline]
- fn $method(self, other: &$u) -> <$t as $imp<$u>>::Output {
- $imp::$method(*self, *other)
- }
- }
- };
(impl $imp:ident, $method:ident for $t:ty, $u:ty, #[$attr:meta]) => {
#[$attr]
impl<'a> $imp<$u> for &'a $t {
@@ -113,21 +65,6 @@ macro_rules! forward_ref_op_assign {
forward_ref_op_assign!(impl $imp, $method for $t, $u,
#[stable(feature = "op_assign_builtins_by_ref", since = "1.22.0")]);
};
- (impl const $imp:ident, $method:ident for $t:ty, $u:ty) => {
- forward_ref_op_assign!(impl const $imp, $method for $t, $u,
- #[stable(feature = "op_assign_builtins_by_ref", since = "1.22.0")]);
- };
- // Equivalent to the non-const version, with the addition of `rustc_const_unstable`
- (impl const $imp:ident, $method:ident for $t:ty, $u:ty, #[$attr:meta]) => {
- #[$attr]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const $imp<&$u> for $t {
- #[inline]
- fn $method(&mut self, other: &$u) {
- $imp::$method(self, *other);
- }
- }
- };
(impl $imp:ident, $method:ident for $t:ty, $u:ty, #[$attr:meta]) => {
#[$attr]
impl $imp<&$u> for $t {
diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs
index a7c100e1b..f5c5dd29f 100644
--- a/library/core/src/intrinsics.rs
+++ b/library/core/src/intrinsics.rs
@@ -1187,7 +1187,7 @@ extern "rust-intrinsic" {
/// Below are common applications of `transmute` which can be replaced with safer
/// constructs.
///
- /// Turning raw bytes (`&[u8]`) into `u32`, `f64`, etc.:
+ /// Turning raw bytes (`[u8; SZ]`) into `u32`, `f64`, etc.:
///
/// ```
/// let raw_bytes = [0x78, 0x56, 0x34, 0x12];
@@ -1376,6 +1376,20 @@ extern "rust-intrinsic" {
#[rustc_nounwind]
pub fn transmute<Src, Dst>(src: Src) -> Dst;
+ /// Like [`transmute`], but even less checked at compile-time: rather than
+ /// giving an error for `size_of::<Src>() != size_of::<Dst>()`, it's
+ /// **Undefined Behaviour** at runtime.
+ ///
+ /// Prefer normal `transmute` where possible, for the extra checking, since
+ /// both do exactly the same thing at runtime, if they both compile.
+ ///
+ /// This is not expected to ever be exposed directly to users, rather it
+ /// may eventually be exposed through some more-constrained API.
+ #[cfg(not(bootstrap))]
+ #[rustc_const_stable(feature = "const_transmute", since = "1.56.0")]
+ #[rustc_nounwind]
+ pub fn transmute_unchecked<Src, Dst>(src: Src) -> Dst;
+
/// Returns `true` if the actual type given as `T` requires drop
/// glue; returns `false` if the actual type provided for `T`
/// implements `Copy`.
@@ -1399,6 +1413,10 @@ extern "rust-intrinsic" {
/// This is implemented as an intrinsic to avoid converting to and from an
/// integer, since the conversion would throw away aliasing information.
///
+ /// This can only be used with `Ptr` as a raw pointer type (`*mut` or `*const`)
+ /// to a `Sized` pointee and with `Delta` as `usize` or `isize`. Any other
+ /// instantiations may arbitrarily misbehave, and that's *not* a compiler bug.
+ ///
/// # Safety
///
/// Both the starting and resulting pointer must be either in bounds or one
@@ -1407,6 +1425,14 @@ extern "rust-intrinsic" {
/// returned value will result in undefined behavior.
///
/// The stabilized version of this intrinsic is [`pointer::offset`].
+ #[cfg(not(bootstrap))]
+ #[must_use = "returns a new pointer rather than modifying its argument"]
+ #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
+ #[rustc_nounwind]
+ pub fn offset<Ptr, Delta>(dst: Ptr, offset: Delta) -> Ptr;
+
+ /// The bootstrap version of this is more restricted.
+ #[cfg(bootstrap)]
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
#[rustc_nounwind]
@@ -1797,14 +1823,12 @@ extern "rust-intrinsic" {
/// with an even least significant digit.
///
/// This intrinsic does not have a stable counterpart.
- #[cfg(not(bootstrap))]
#[rustc_nounwind]
pub fn roundevenf32(x: f32) -> f32;
/// Returns the nearest integer to an `f64`. Rounds half-way cases to the number
/// with an even least significant digit.
///
/// This intrinsic does not have a stable counterpart.
- #[cfg(not(bootstrap))]
#[rustc_nounwind]
pub fn roundevenf64(x: f64) -> f64;
@@ -2233,13 +2257,23 @@ extern "rust-intrinsic" {
/// This is an implementation detail of [`crate::ptr::read`] and should
/// not be used anywhere else. See its comments for why this exists.
///
- /// This intrinsic can *only* be called where the argument is a local without
- /// projections (`read_via_copy(p)`, not `read_via_copy(*p)`) so that it
+ /// This intrinsic can *only* be called where the pointer is a local without
+ /// projections (`read_via_copy(ptr)`, not `read_via_copy(*ptr)`) so that it
/// trivially obeys runtime-MIR rules about derefs in operands.
+ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
+ #[rustc_nounwind]
+ pub fn read_via_copy<T>(ptr: *const T) -> T;
+
+ /// This is an implementation detail of [`crate::ptr::write`] and should
+ /// not be used anywhere else. See its comments for why this exists.
+ ///
+ /// This intrinsic can *only* be called where the pointer is a local without
+ /// projections (`write_via_move(ptr, x)`, not `write_via_move(*ptr, x)`) so
+ /// that it trivially obeys runtime-MIR rules about derefs in operands.
#[cfg(not(bootstrap))]
- #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+ #[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
#[rustc_nounwind]
- pub fn read_via_copy<T>(p: *const T) -> T;
+ pub fn write_via_move<T>(ptr: *mut T, value: T);
/// Returns the value of the discriminant for the variant in 'v';
/// if `T` has no discriminant, returns `0`.
@@ -2444,7 +2478,6 @@ extern "rust-intrinsic" {
/// This method creates a pointer to any `Some` value. If the argument is
/// `None`, an invalid within-bounds pointer (that is still acceptable for
/// constructing an empty slice) is returned.
- #[cfg(not(bootstrap))]
#[rustc_nounwind]
pub fn option_payload_ptr<T>(arg: *const Option<T>) -> *const T;
}
@@ -2460,7 +2493,7 @@ extern "rust-intrinsic" {
/// This macro should be called as `assert_unsafe_precondition!([Generics](name: Type) => Expression)`
/// where the names specified will be moved into the macro as captured variables, and defines an item
/// to call `const_eval_select` on. The tokens inside the square brackets are used to denote generics
-/// for the function declaractions and can be omitted if there is no generics.
+/// for the function declarations and can be omitted if there are no generics.
///
/// # Safety
///
@@ -2490,6 +2523,7 @@ macro_rules! assert_unsafe_precondition {
}
}
#[allow(non_snake_case)]
+ #[inline]
const fn comptime$(<$($tt)*>)?($(_:$ty),*) {}
::core::intrinsics::const_eval_select(($($i,)*), comptime, runtime);
@@ -2519,7 +2553,9 @@ pub(crate) fn is_valid_allocation_size<T>(len: usize) -> bool {
pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
let src_usize = src.addr();
let dst_usize = dst.addr();
- let size = mem::size_of::<T>().checked_mul(count).unwrap();
+ let size = mem::size_of::<T>()
+ .checked_mul(count)
+ .expect("is_nonoverlapping: `size_of::<T>() * count` overflows a usize");
let diff = if src_usize > dst_usize { src_usize - dst_usize } else { dst_usize - src_usize };
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
@@ -2717,7 +2753,7 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
// SAFETY: the safety contract for `copy` must be upheld by the caller.
unsafe {
assert_unsafe_precondition!(
- "ptr::copy requires that both pointer arguments are aligned aligned and non-null",
+ "ptr::copy requires that both pointer arguments are aligned and non-null",
[T](src: *const T, dst: *mut T) =>
is_aligned_and_not_null(src) && is_aligned_and_not_null(dst)
);
@@ -2796,3 +2832,24 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
write_bytes(dst, val, count)
}
}
+
+/// Polyfill of the `transmute_unchecked` intrinsic for bootstrap builds.
+#[cfg(bootstrap)]
+pub const unsafe fn transmute_unchecked<Src, Dst>(src: Src) -> Dst {
+ use crate::mem::*;
+ // SAFETY: It's a transmute -- the caller promised it's fine.
+ unsafe { transmute_copy(&ManuallyDrop::new(src)) }
+}
+
+/// Polyfill of the `write_via_move` intrinsic for bootstrap builds.
+#[cfg(bootstrap)]
+pub const unsafe fn write_via_move<T>(ptr: *mut T, value: T) {
+ use crate::mem::*;
+ // SAFETY: the caller must guarantee that `ptr` is valid for writes.
+ // `ptr` cannot overlap `value` because the caller has mutable access
+ // to `*ptr` while `value` is owned by this function.
+ unsafe {
+ copy_nonoverlapping::<T>(&value, ptr, 1);
+ forget(value);
+ }
+}
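As a quick sanity check of what the polyfill above emulates (a sketch, not part of the patch): `ptr::write`, whose implementation detail this intrinsic is, moves its value into the destination without reading or dropping the bytes that were there, which is exactly the copy-then-forget dance performed above.

```rust
use core::mem::MaybeUninit;
use core::ptr;

fn main() {
    let mut slot = MaybeUninit::<String>::uninit();
    let value = String::from("moved in");
    // The old bytes in `slot` are neither read nor dropped; `value` is
    // simply moved into place, just like copy_nonoverlapping + forget.
    unsafe { ptr::write(slot.as_mut_ptr(), value) };
    assert_eq!(unsafe { slot.assume_init() }, "moved in");
}
```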
diff --git a/library/core/src/intrinsics/mir.rs b/library/core/src/intrinsics/mir.rs
index 45498a54b..5944a0de1 100644
--- a/library/core/src/intrinsics/mir.rs
+++ b/library/core/src/intrinsics/mir.rs
@@ -15,7 +15,6 @@
//! ```rust
//! #![feature(core_intrinsics, custom_mir)]
//!
-//! extern crate core;
//! use core::intrinsics::mir::*;
//!
//! #[custom_mir(dialect = "built")]
@@ -65,7 +64,6 @@
//! ```rust
//! #![feature(core_intrinsics, custom_mir)]
//!
-//! extern crate core;
//! use core::intrinsics::mir::*;
//!
//! #[custom_mir(dialect = "built")]
@@ -230,7 +228,7 @@
//!
//! - Operands implicitly convert to `Use` rvalues.
//! - `&`, `&mut`, `addr_of!`, and `addr_of_mut!` all work to create their associated rvalue.
-//! - [`Discriminant`] and [`Len`] have associated functions.
+//! - [`Discriminant`], [`Len`], and [`CopyForDeref`] have associated functions.
//! - Unary and binary operations use their normal Rust syntax - `a * b`, `!c`, etc.
//! - The binary operation `Offset` can be created via [`Offset`].
//! - Checked binary operations are represented by wrapping the associated binop in [`Checked`].
@@ -265,6 +263,7 @@ pub struct BasicBlock;
macro_rules! define {
($name:literal, $( #[ $meta:meta ] )* fn $($sig:tt)*) => {
#[rustc_diagnostic_item = $name]
+ #[inline]
$( #[ $meta ] )*
pub fn $($sig)* { panic!() }
}
@@ -280,6 +279,7 @@ define!("mir_storage_dead", fn StorageDead<T>(local: T));
define!("mir_deinit", fn Deinit<T>(place: T));
define!("mir_checked", fn Checked<T>(binop: T) -> (T, bool));
define!("mir_len", fn Len<T>(place: T) -> usize);
+define!("mir_copy_for_deref", fn CopyForDeref<T>(place: T) -> T);
define!("mir_retag", fn Retag<T>(place: T));
define!("mir_move", fn Move<T>(place: T) -> T);
define!("mir_static", fn Static<T>(s: T) -> &'static T);
@@ -317,7 +317,6 @@ define!(
/// ```rust
/// #![feature(custom_mir, core_intrinsics)]
///
- /// extern crate core;
/// use core::intrinsics::mir::*;
///
/// #[custom_mir(dialect = "built")]
diff --git a/library/core/src/iter/adapters/chain.rs b/library/core/src/iter/adapters/chain.rs
index 75727c3a2..26aa959e6 100644
--- a/library/core/src/iter/adapters/chain.rs
+++ b/library/core/src/iter/adapters/chain.rs
@@ -15,7 +15,7 @@ use crate::ops::Try;
///
/// let a1 = [1, 2, 3];
/// let a2 = [4, 5, 6];
-/// let iter: Chain<Iter<_>, Iter<_>> = a1.iter().chain(a2.iter());
+/// let iter: Chain<Iter<'_, _>, Iter<'_, _>> = a1.iter().chain(a2.iter());
/// ```
#[derive(Clone, Debug)]
#[must_use = "iterators are lazy and do nothing unless consumed"]
diff --git a/library/core/src/iter/adapters/filter.rs b/library/core/src/iter/adapters/filter.rs
index a0afaa326..723657b9e 100644
--- a/library/core/src/iter/adapters/filter.rs
+++ b/library/core/src/iter/adapters/filter.rs
@@ -1,6 +1,9 @@
use crate::fmt;
use crate::iter::{adapters::SourceIter, FusedIterator, InPlaceIterable};
use crate::ops::Try;
+use core::array;
+use core::mem::{ManuallyDrop, MaybeUninit};
+use core::ops::ControlFlow;
/// An iterator that filters the elements of `iter` with `predicate`.
///
@@ -57,6 +60,58 @@ where
}
#[inline]
+ fn next_chunk<const N: usize>(
+ &mut self,
+ ) -> Result<[Self::Item; N], array::IntoIter<Self::Item, N>> {
+ let mut array: [MaybeUninit<Self::Item>; N] = MaybeUninit::uninit_array();
+
+ struct Guard<'a, T> {
+ array: &'a mut [MaybeUninit<T>],
+ initialized: usize,
+ }
+
+ impl<T> Drop for Guard<'_, T> {
+ #[inline]
+ fn drop(&mut self) {
+ if const { crate::mem::needs_drop::<T>() } {
+ // SAFETY: self.initialized is always <= N, which also is the length of the array.
+ unsafe {
+ core::ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(
+ self.array.get_unchecked_mut(..self.initialized),
+ ));
+ }
+ }
+ }
+ }
+
+ let mut guard = Guard { array: &mut array, initialized: 0 };
+
+ let result = self.iter.try_for_each(|element| {
+ let idx = guard.initialized;
+ guard.initialized = idx + (self.predicate)(&element) as usize;
+
+ // SAFETY: Loop conditions ensure the index is in bounds.
+ unsafe { guard.array.get_unchecked_mut(idx) }.write(element);
+
+ if guard.initialized < N { ControlFlow::Continue(()) } else { ControlFlow::Break(()) }
+ });
+
+ let guard = ManuallyDrop::new(guard);
+
+ match result {
+ ControlFlow::Break(()) => {
+ // SAFETY: The loop above is only explicitly broken when the array has been fully initialized
+ Ok(unsafe { MaybeUninit::array_assume_init(array) })
+ }
+ ControlFlow::Continue(()) => {
+ let initialized = guard.initialized;
+ // SAFETY: The range is in bounds since the loop breaks when reaching N elements.
+ Err(unsafe { array::IntoIter::new_unchecked(array, 0..initialized) })
+ }
+ }
+ }
+
+ #[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
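A usage sketch of the new override (requires a nightly toolchain with the unstable `iter_next_chunk` feature; the observable behaviour matches the default `next_chunk`, this is purely a fast path):

```rust
#![feature(iter_next_chunk)]

fn main() {
    let mut evens = (0..10).filter(|n| n % 2 == 0);

    // Four matching elements exist, so the chunk comes back fully initialized.
    assert_eq!(evens.next_chunk::<4>().unwrap(), [0, 2, 4, 6]);

    // Only `8` is left, so the remainder is returned as a partially
    // filled `array::IntoIter`.
    let rest = evens.next_chunk::<4>().unwrap_err();
    assert_eq!(rest.collect::<Vec<_>>(), vec![8]);
}
```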
diff --git a/library/core/src/iter/adapters/filter_map.rs b/library/core/src/iter/adapters/filter_map.rs
index 6bdf53f7f..693479977 100644
--- a/library/core/src/iter/adapters/filter_map.rs
+++ b/library/core/src/iter/adapters/filter_map.rs
@@ -1,6 +1,7 @@
-use crate::fmt;
use crate::iter::{adapters::SourceIter, FusedIterator, InPlaceIterable};
+use crate::mem::{ManuallyDrop, MaybeUninit};
use crate::ops::{ControlFlow, Try};
+use crate::{array, fmt};
/// An iterator that uses `f` to both filter and map elements from `iter`.
///
@@ -62,6 +63,65 @@ where
}
#[inline]
+ fn next_chunk<const N: usize>(
+ &mut self,
+ ) -> Result<[Self::Item; N], array::IntoIter<Self::Item, N>> {
+ let mut array: [MaybeUninit<Self::Item>; N] = MaybeUninit::uninit_array();
+
+ struct Guard<'a, T> {
+ array: &'a mut [MaybeUninit<T>],
+ initialized: usize,
+ }
+
+ impl<T> Drop for Guard<'_, T> {
+ #[inline]
+ fn drop(&mut self) {
+ if const { crate::mem::needs_drop::<T>() } {
+ // SAFETY: self.initialized is always <= N, which also is the length of the array.
+ unsafe {
+ core::ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(
+ self.array.get_unchecked_mut(..self.initialized),
+ ));
+ }
+ }
+ }
+ }
+
+ let mut guard = Guard { array: &mut array, initialized: 0 };
+
+ let result = self.iter.try_for_each(|element| {
+ let idx = guard.initialized;
+ let val = (self.f)(element);
+ guard.initialized = idx + val.is_some() as usize;
+
+ // SAFETY: Loop conditions ensure the index is in bounds.
+
+ unsafe {
+ let opt_payload_at = core::intrinsics::option_payload_ptr(&val);
+ let dst = guard.array.as_mut_ptr().add(idx);
+ crate::ptr::copy_nonoverlapping(opt_payload_at.cast(), dst, 1);
+ crate::mem::forget(val);
+ };
+
+ if guard.initialized < N { ControlFlow::Continue(()) } else { ControlFlow::Break(()) }
+ });
+
+ let guard = ManuallyDrop::new(guard);
+
+ match result {
+ ControlFlow::Break(()) => {
+ // SAFETY: The loop above is only explicitly broken when the array has been fully initialized
+ Ok(unsafe { MaybeUninit::array_assume_init(array) })
+ }
+ ControlFlow::Continue(()) => {
+ let initialized = guard.initialized;
+ // SAFETY: The range is in bounds since the loop breaks when reaching N elements.
+ Err(unsafe { array::IntoIter::new_unchecked(array, 0..initialized) })
+ }
+ }
+ }
+
+ #[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper) // can't know a lower bound, due to the predicate
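The `option_payload_ptr` copy above moves the `Some` payload straight into the buffer and then forgets `val`, so the payload is moved exactly once without a match. Ignoring the drop-guard bookkeeping, a safe sketch of what each iteration accomplishes (the helper name is made up for illustration):

```rust
use core::mem::MaybeUninit;

// Hypothetical helper: write the payload, if any, into the next
// uninitialized slot and report the new initialized count.
fn write_if_some<T, const N: usize>(
    buf: &mut [MaybeUninit<T>; N],
    idx: usize,
    val: Option<T>,
) -> usize {
    match val {
        Some(payload) => {
            buf[idx].write(payload);
            idx + 1
        }
        None => idx,
    }
}

fn main() {
    let mut buf = [MaybeUninit::<u32>::uninit(); 2];
    assert_eq!(write_if_some(&mut buf, 0, Some(7)), 1);
    assert_eq!(write_if_some(&mut buf, 1, None), 1);
}
```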
diff --git a/library/core/src/iter/adapters/flatten.rs b/library/core/src/iter/adapters/flatten.rs
index 2fd8a5c1d..2568aaf34 100644
--- a/library/core/src/iter/adapters/flatten.rs
+++ b/library/core/src/iter/adapters/flatten.rs
@@ -136,26 +136,12 @@ where
}
#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<T, I, F, const N: usize> TrustedLen for FlatMap<I, [T; N], F>
+unsafe impl<I, U, F> TrustedLen for FlatMap<I, U, F>
where
- I: TrustedLen,
- F: FnMut(I::Item) -> [T; N],
-{
-}
-
-#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<'a, T, I, F, const N: usize> TrustedLen for FlatMap<I, &'a [T; N], F>
-where
- I: TrustedLen,
- F: FnMut(I::Item) -> &'a [T; N],
-{
-}
-
-#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<'a, T, I, F, const N: usize> TrustedLen for FlatMap<I, &'a mut [T; N], F>
-where
- I: TrustedLen,
- F: FnMut(I::Item) -> &'a mut [T; N],
+ I: Iterator,
+ U: IntoIterator,
+ F: FnMut(I::Item) -> U,
+ FlattenCompat<Map<I, F>, <U as IntoIterator>::IntoIter>: TrustedLen,
{
}
@@ -298,8 +284,8 @@ where
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<I> TrustedLen for Flatten<I>
where
- I: TrustedLen,
- <I as Iterator>::Item: TrustedConstSize,
+ I: Iterator<Item: IntoIterator>,
+ FlattenCompat<I, <I::Item as IntoIterator>::IntoIter>: TrustedLen,
{
}
@@ -324,6 +310,7 @@ where
/// Real logic of both `Flatten` and `FlatMap` which simply delegate to
/// this type.
#[derive(Clone, Debug)]
+#[unstable(feature = "trusted_len", issue = "37572")]
struct FlattenCompat<I, U> {
iter: Fuse<I>,
frontiter: Option<U>,
@@ -477,6 +464,7 @@ where
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
impl<I, U> Iterator for FlattenCompat<I, U>
where
I: Iterator<Item: IntoIterator<IntoIter = U, Item = U::Item>>,
@@ -591,6 +579,7 @@ where
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
impl<I, U> DoubleEndedIterator for FlattenCompat<I, U>
where
I: DoubleEndedIterator<Item: IntoIterator<IntoIter = U, Item = U::Item>>,
@@ -660,6 +649,30 @@ where
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<const N: usize, I, T> TrustedLen
+ for FlattenCompat<I, <[T; N] as IntoIterator>::IntoIter>
+where
+ I: TrustedLen<Item = [T; N]>,
+{
+}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, const N: usize, I, T> TrustedLen
+ for FlattenCompat<I, <&'a [T; N] as IntoIterator>::IntoIter>
+where
+ I: TrustedLen<Item = &'a [T; N]>,
+{
+}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<'a, const N: usize, I, T> TrustedLen
+ for FlattenCompat<I, <&'a mut [T; N] as IntoIterator>::IntoIter>
+where
+ I: TrustedLen<Item = &'a mut [T; N]>,
+{
+}
+
trait ConstSizeIntoIterator: IntoIterator {
// FIXME(#31844): convert to an associated const once specialization supports that
fn size() -> Option<usize>;
@@ -696,19 +709,6 @@ impl<T, const N: usize> ConstSizeIntoIterator for &mut [T; N] {
}
}
-#[doc(hidden)]
-#[unstable(feature = "std_internals", issue = "none")]
-// FIXME(#20400): Instead of this helper trait there should be multiple impl TrustedLen for Flatten<>
-// blocks with different bounds on Iterator::Item but the compiler erroneously considers them overlapping
-pub unsafe trait TrustedConstSize: IntoIterator {}
-
-#[unstable(feature = "std_internals", issue = "none")]
-unsafe impl<T, const N: usize> TrustedConstSize for [T; N] {}
-#[unstable(feature = "std_internals", issue = "none")]
-unsafe impl<T, const N: usize> TrustedConstSize for &'_ [T; N] {}
-#[unstable(feature = "std_internals", issue = "none")]
-unsafe impl<T, const N: usize> TrustedConstSize for &'_ mut [T; N] {}
-
#[inline]
fn and_then_or_clear<T, U>(opt: &mut Option<T>, f: impl FnOnce(&mut T) -> Option<U>) -> Option<U> {
let x = f(opt.as_mut()?);
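One observable consequence of routing `TrustedLen` through `FlattenCompat` (hedged: exact hints also depend on the `ConstSizeIntoIterator` machinery kept above) is that flattening fixed-size arrays should keep reporting exact size hints, for example:

```rust
fn main() {
    let flat = [[1, 2, 3], [4, 5, 6]].into_iter().flatten();
    assert_eq!(flat.size_hint(), (6, Some(6)));

    let mapped = (0..4).flat_map(|n| [n; 2]);
    assert_eq!(mapped.size_hint(), (8, Some(8)));
}
```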
diff --git a/library/core/src/iter/range.rs b/library/core/src/iter/range.rs
index 37db07429..0171d8981 100644
--- a/library/core/src/iter/range.rs
+++ b/library/core/src/iter/range.rs
@@ -732,12 +732,18 @@ impl<A: Step> Iterator for ops::Range<A> {
}
#[inline]
- fn min(mut self) -> Option<A> {
+ fn min(mut self) -> Option<A>
+ where
+ A: Ord,
+ {
self.next()
}
#[inline]
- fn max(mut self) -> Option<A> {
+ fn max(mut self) -> Option<A>
+ where
+ A: Ord,
+ {
self.next_back()
}
@@ -1158,12 +1164,18 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
}
#[inline]
- fn min(mut self) -> Option<A> {
+ fn min(mut self) -> Option<A>
+ where
+ A: Ord,
+ {
self.next()
}
#[inline]
- fn max(mut self) -> Option<A> {
+ fn max(mut self) -> Option<A>
+ where
+ A: Ord,
+ {
self.next_back()
}
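The tightened `min`/`max` signatures above keep the existing shortcut: a forward range is already sorted, so both answers are just its endpoints. For example:

```rust
fn main() {
    assert_eq!((3..10).min(), Some(3));
    assert_eq!((3..10).max(), Some(9));
    assert_eq!((3..=10).max(), Some(10));
    assert_eq!((10..3).min(), None); // empty range
}
```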
diff --git a/library/core/src/iter/sources/empty.rs b/library/core/src/iter/sources/empty.rs
index 617dfd123..243df015f 100644
--- a/library/core/src/iter/sources/empty.rs
+++ b/library/core/src/iter/sources/empty.rs
@@ -81,8 +81,7 @@ impl<T> Clone for Empty<T> {
// not #[derive] because that adds a Default bound on T,
// which isn't necessary.
#[stable(feature = "iter_empty", since = "1.2.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<T> const Default for Empty<T> {
+impl<T> Default for Empty<T> {
fn default() -> Empty<T> {
Empty(marker::PhantomData)
}
diff --git a/library/core/src/iter/traits/collect.rs b/library/core/src/iter/traits/collect.rs
index e099700e3..0675e5635 100644
--- a/library/core/src/iter/traits/collect.rs
+++ b/library/core/src/iter/traits/collect.rs
@@ -95,6 +95,16 @@
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
on(
+ _Self = "&[{A}]",
+ message = "a slice of type `{Self}` cannot be built since we need to store the elements somewhere",
+ label = "try explicitly collecting into a `Vec<{A}>`",
+ ),
+ on(
+ all(A = "{integer}", any(_Self = "&[{integral}]",)),
+ message = "a slice of type `{Self}` cannot be built since we need to store the elements somewhere",
+ label = "try explicitly collecting into a `Vec<{A}>`",
+ ),
+ on(
_Self = "[{A}]",
message = "a slice of type `{Self}` cannot be built since `{Self}` has no definite size",
label = "try explicitly collecting into a `Vec<{A}>`",
@@ -228,7 +238,6 @@ pub trait FromIterator<A>: Sized {
#[rustc_diagnostic_item = "IntoIterator"]
#[rustc_skip_array_during_method_dispatch]
#[stable(feature = "rust1", since = "1.0.0")]
-#[const_trait]
pub trait IntoIterator {
/// The type of the elements being iterated over.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -264,7 +273,7 @@ pub trait IntoIterator {
#[rustc_const_unstable(feature = "const_intoiterator_identity", issue = "90603")]
#[stable(feature = "rust1", since = "1.0.0")]
-impl<I: Iterator> const IntoIterator for I {
+impl<I: Iterator> IntoIterator for I {
type Item = I::Item;
type IntoIter = I;
diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs
index 028776042..dabfce144 100644
--- a/library/core/src/iter/traits/iterator.rs
+++ b/library/core/src/iter/traits/iterator.rs
@@ -70,7 +70,6 @@ fn _assert_is_object_safe(_: &dyn Iterator<Item = ()>) {}
#[doc(notable_trait)]
#[rustc_diagnostic_item = "Iterator"]
#[must_use = "iterators are lazy and do nothing unless consumed"]
-#[const_trait]
pub trait Iterator {
/// The type of the elements being iterated over.
#[rustc_diagnostic_item = "IteratorItem"]
diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs
index 04243544b..6c419eb16 100644
--- a/library/core/src/lib.rs
+++ b/library/core/src/lib.rs
@@ -107,16 +107,13 @@
#![feature(const_arguments_as_str)]
#![feature(const_array_from_ref)]
#![feature(const_array_into_iter_constructors)]
+#![feature(const_assume)]
#![feature(const_bigint_helper_methods)]
#![feature(const_black_box)]
#![feature(const_caller_location)]
#![feature(const_cell_into_inner)]
#![feature(const_char_from_u32_unchecked)]
-#![feature(const_clone)]
-#![feature(const_cmp)]
-#![feature(const_convert)]
#![feature(const_cstr_methods)]
-#![feature(const_default_impls)]
#![feature(const_discriminant)]
#![feature(const_eval_select)]
#![feature(const_exact_div)]
@@ -131,14 +128,12 @@
#![feature(const_intrinsic_forget)]
#![feature(const_ipv4)]
#![feature(const_ipv6)]
-#![feature(const_is_char_boundary)]
#![feature(const_likely)]
#![feature(const_maybe_uninit_as_mut_ptr)]
#![feature(const_maybe_uninit_assume_init)]
#![feature(const_maybe_uninit_uninit_array)]
#![feature(const_nonnull_new)]
-#![feature(const_num_from_num)]
-#![feature(const_ops)]
+#![feature(const_num_midpoint)]
#![feature(const_option)]
#![feature(const_option_ext)]
#![feature(const_pin)]
@@ -146,22 +141,20 @@
#![feature(const_pointer_is_aligned)]
#![feature(const_ptr_as_ref)]
#![feature(const_ptr_is_null)]
-#![feature(const_ptr_read)]
#![feature(const_ptr_sub_ptr)]
#![feature(const_ptr_write)]
#![feature(const_raw_ptr_comparison)]
#![feature(const_replace)]
-#![feature(const_result_drop)]
#![feature(const_size_of_val)]
#![feature(const_size_of_val_raw)]
#![feature(const_slice_from_raw_parts_mut)]
#![feature(const_slice_from_ref)]
#![feature(const_slice_index)]
+#![feature(const_slice_is_ascii)]
#![feature(const_slice_ptr_len)]
#![feature(const_slice_split_at_mut)]
#![feature(const_str_from_utf8_unchecked_mut)]
#![feature(const_swap)]
-#![feature(const_trait_impl)]
#![feature(const_transmute_copy)]
#![feature(const_try)]
#![feature(const_type_id)]
@@ -171,6 +164,7 @@
#![feature(const_waker)]
#![feature(core_panic)]
#![feature(duration_consts_float)]
+#![feature(internal_impls_macro)]
#![feature(ip)]
#![feature(is_ascii_octdigit)]
#![feature(maybe_uninit_uninit_array)]
@@ -207,9 +201,9 @@
#![feature(const_mut_refs)]
#![feature(const_precise_live_drops)]
#![feature(const_refs_to_cell)]
+#![feature(const_trait_impl)]
#![feature(decl_macro)]
#![feature(deprecated_suggestion)]
-#![feature(derive_const)]
#![feature(doc_cfg)]
#![feature(doc_cfg_hide)]
#![feature(doc_notable_trait)]
@@ -222,6 +216,7 @@
#![feature(intra_doc_pointers)]
#![feature(intrinsics)]
#![feature(lang_items)]
+#![feature(let_chains)]
#![feature(link_llvm_intrinsics)]
#![feature(macro_metavar_expr)]
#![feature(min_specialization)]
@@ -385,6 +380,7 @@ pub mod alloc;
// note: does not need to be public
mod bool;
+mod escape;
mod tuple;
mod unit;
diff --git a/library/core/src/macros/mod.rs b/library/core/src/macros/mod.rs
index 7c93c93b4..c4134dbcd 100644
--- a/library/core/src/macros/mod.rs
+++ b/library/core/src/macros/mod.rs
@@ -498,7 +498,6 @@ macro_rules! r#try {
/// In a `no_std` setup you are responsible for the implementation details of the components.
///
/// ```no_run
-/// # extern crate core;
/// use core::fmt::Write;
///
/// struct Example;
@@ -1428,7 +1427,7 @@ pub(crate) mod builtin {
#[rustc_builtin_macro]
#[macro_export]
#[rustc_diagnostic_item = "assert_macro"]
- #[allow_internal_unstable(core_panic, edition_panic)]
+ #[allow_internal_unstable(core_panic, edition_panic, generic_assert_internals)]
macro_rules! assert {
($cond:expr $(,)?) => {{ /* compiler built-in */ }};
($cond:expr, $($arg:tt)+) => {{ /* compiler built-in */ }};
diff --git a/library/core/src/macros/panic.md b/library/core/src/macros/panic.md
index 98fb7e9e4..8b549e187 100644
--- a/library/core/src/macros/panic.md
+++ b/library/core/src/macros/panic.md
@@ -42,7 +42,7 @@ the successful result of some computation, `Ok(T)`, or error types that
represent an anticipated runtime failure mode of that computation, `Err(E)`.
`Result` is used alongside user defined types which represent the various
anticipated runtime failure modes that the associated computation could
-encounter. `Result` must be propagated manually, often with the the help of the
+encounter. `Result` must be propagated manually, often with the help of the
`?` operator and `Try` trait, and they must be reported manually, often with
the help of the `Error` trait.
diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs
index 3cd4f5104..8dab8d1a6 100644
--- a/library/core/src/marker.rs
+++ b/library/core/src/marker.rs
@@ -12,6 +12,50 @@ use crate::fmt::Debug;
use crate::hash::Hash;
use crate::hash::Hasher;
+/// Implements a given marker trait for multiple types at the same time.
+///
+/// The basic syntax looks like this:
+/// ```ignore private macro
+/// marker_impls! { MarkerTrait for u8, i8 }
+/// ```
+/// You can also implement `unsafe` traits
+/// ```ignore private macro
+/// marker_impls! { unsafe MarkerTrait for u8, i8 }
+/// ```
+/// Add attributes to all impls:
+/// ```ignore private macro
+/// marker_impls! {
+/// #[allow(lint)]
+/// #[unstable(feature = "marker_trait", issue = "none")]
+/// MarkerTrait for u8, i8
+/// }
+/// ```
+/// And use generics:
+/// ```ignore private macro
+/// marker_impls! {
+/// MarkerTrait for
+/// u8, i8,
+/// {T: ?Sized} *const T,
+/// {T: ?Sized} *mut T,
+/// {T: MarkerTrait} PhantomData<T>,
+/// u32,
+/// }
+/// ```
+#[unstable(feature = "internal_impls_macro", issue = "none")]
+macro marker_impls {
+ ( $(#[$($meta:tt)*])* $Trait:ident for $({$($bounds:tt)*})? $T:ty $(, $($rest:tt)*)? ) => {
+ $(#[$($meta)*])* impl< $($($bounds)*)? > $Trait for $T {}
+ marker_impls! { $(#[$($meta)*])* $Trait for $($($rest)*)? }
+ },
+ ( $(#[$($meta:tt)*])* $Trait:ident for ) => {},
+
+ ( $(#[$($meta:tt)*])* unsafe $Trait:ident for $({$($bounds:tt)*})? $T:ty $(, $($rest:tt)*)? ) => {
+ $(#[$($meta)*])* unsafe impl< $($($bounds)*)? > $Trait for $T {}
+ marker_impls! { $(#[$($meta)*])* unsafe $Trait for $($($rest)*)? }
+ },
+ ( $(#[$($meta:tt)*])* unsafe $Trait:ident for ) => {},
+}
+
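Roughly what a small invocation of `marker_impls!` expands to (a sketch; `MyMarker` is a made-up trait used only for illustration):

```rust
trait MyMarker {}

// marker_impls! {
//     MyMarker for
//     u8,
//     {T: ?Sized} *const T,
// }
//
// ...expands to:
impl MyMarker for u8 {}
impl<T: ?Sized> MyMarker for *const T {}

fn main() {}
```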
/// Types that can be transferred across thread boundaries.
///
/// This trait is automatically implemented when the compiler determines it's
@@ -24,7 +68,7 @@ use crate::hash::Hasher;
/// operations. Its cousin [`sync::Arc`][arc] does use atomic operations (incurring
/// some overhead) and thus is `Send`.
///
-/// See [the Nomicon](../../nomicon/send-and-sync.html) for more details.
+/// See [the Nomicon](../../nomicon/send-and-sync.html) and the [`Sync`] trait for more details.
///
/// [`Rc`]: ../../std/rc/struct.Rc.html
/// [arc]: ../../std/sync/struct.Arc.html
@@ -214,6 +258,20 @@ pub trait StructuralEq {
// Empty.
}
+// FIXME: Remove special cases of these types from the compiler pattern checking code and always check `T: StructuralEq` instead
+marker_impls! {
+ #[unstable(feature = "structural_match", issue = "31434")]
+ StructuralEq for
+ usize, u8, u16, u32, u64, u128,
+ isize, i8, i16, i32, i64, i128,
+ bool,
+ char,
+ str /* Technically requires `[u8]: StructuralEq` */,
+ {T, const N: usize} [T; N],
+ {T} [T],
+ {T: ?Sized} &T,
+}
+
/// Types whose values can be duplicated simply by copying bits.
///
/// By default, variable bindings have 'move semantics.' In other
@@ -401,6 +459,30 @@ pub macro Copy($item:item) {
/* compiler built-in */
}
+// Implementations of `Copy` for primitive types.
+//
+// Implementations that cannot be described in Rust
+// are implemented in `traits::SelectionContext::copy_clone_conditions()`
+// in `rustc_trait_selection`.
+marker_impls! {
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Copy for
+ usize, u8, u16, u32, u64, u128,
+ isize, i8, i16, i32, i64, i128,
+ f32, f64,
+ bool, char,
+ {T: ?Sized} *const T,
+ {T: ?Sized} *mut T,
+
+}
+
+#[unstable(feature = "never_type", issue = "35121")]
+impl Copy for ! {}
+
+/// Shared references can be copied, but mutable references *cannot*!
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Copy for &T {}
+
/// Types for which it is safe to share references between threads.
///
/// This trait is automatically implemented when the compiler determines
@@ -426,6 +508,11 @@ pub macro Copy($item:item) {
/// becomes read-only, as if it were a `& &T`. Hence there is no risk
/// of a data race.
///
+/// A shorter overview of how [`Sync`] and [`Send`] relate to referencing:
+/// * `&T` is [`Send`] if and only if `T` is [`Sync`]
+/// * `&mut T` is [`Send`] if and only if `T` is [`Send`]
+/// * `&T` and `&mut T` are [`Sync`] if and only if `T` is [`Sync`]
+///
/// Types that are not `Sync` are those that have "interior
/// mutability" in a non-thread-safe form, such as [`Cell`][cell]
/// and [`RefCell`][refcell]. These types allow for mutation of
@@ -591,14 +678,14 @@ impl<T: ?Sized> !Sync for *mut T {}
/// use std::marker::PhantomData;
///
/// # #[allow(dead_code)]
-/// struct Slice<'a, T: 'a> {
+/// struct Slice<'a, T> {
/// start: *const T,
/// end: *const T,
/// phantom: PhantomData<&'a T>,
/// }
/// ```
///
-/// This also in turn requires the annotation `T: 'a`, indicating
+/// This also in turn infers the lifetime bound `T: 'a`, indicating
/// that any references in `T` are valid over the lifetime `'a`.
///
/// When initializing a `Slice` you simply provide the value
@@ -607,7 +694,7 @@ impl<T: ?Sized> !Sync for *mut T {}
/// ```
/// # #![allow(dead_code)]
/// # use std::marker::PhantomData;
-/// # struct Slice<'a, T: 'a> {
+/// # struct Slice<'a, T> {
/// # start: *const T,
/// # end: *const T,
/// # phantom: PhantomData<&'a T>,
@@ -669,16 +756,11 @@ impl<T: ?Sized> !Sync for *mut T {}
///
/// ## Ownership and the drop check
///
-/// Adding a field of type `PhantomData<T>` indicates that your
-/// type owns data of type `T`. This in turn implies that when your
-/// type is dropped, it may drop one or more instances of the type
-/// `T`. This has bearing on the Rust compiler's [drop check]
-/// analysis.
+/// The exact interaction of `PhantomData` with drop check **may change in the future**.
///
-/// If your struct does not in fact *own* the data of type `T`, it is
-/// better to use a reference type, like `PhantomData<&'a T>`
-/// (ideally) or `PhantomData<*const T>` (if no lifetime applies), so
-/// as not to indicate ownership.
+/// Currently, adding a field of type `PhantomData<T>` indicates that your type *owns* data of type
+/// `T` in very rare circumstances. This in turn has effects on the Rust compiler's [drop check]
+/// analysis. For the exact rules, see the [drop check] documentation.
///
/// ## Layout
///
@@ -686,7 +768,7 @@ impl<T: ?Sized> !Sync for *mut T {}
/// * `size_of::<PhantomData<T>>() == 0`
/// * `align_of::<PhantomData<T>>() == 1`
///
-/// [drop check]: ../../nomicon/dropck.html
+/// [drop check]: Drop#drop-check
#[lang = "phantom_data"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct PhantomData<T: ?Sized>;
@@ -732,8 +814,7 @@ impl<T: ?Sized> Clone for PhantomData<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<T: ?Sized> const Default for PhantomData<T> {
+impl<T: ?Sized> Default for PhantomData<T> {
fn default() -> Self {
Self
}
@@ -774,11 +855,14 @@ pub trait DiscriminantKind {
pub(crate) unsafe auto trait Freeze {}
impl<T: ?Sized> !Freeze for UnsafeCell<T> {}
-unsafe impl<T: ?Sized> Freeze for PhantomData<T> {}
-unsafe impl<T: ?Sized> Freeze for *const T {}
-unsafe impl<T: ?Sized> Freeze for *mut T {}
-unsafe impl<T: ?Sized> Freeze for &T {}
-unsafe impl<T: ?Sized> Freeze for &mut T {}
+marker_impls! {
+ unsafe Freeze for
+ {T: ?Sized} PhantomData<T>,
+ {T: ?Sized} *const T,
+ {T: ?Sized} *mut T,
+ {T: ?Sized} &T,
+ {T: ?Sized} &mut T,
+}
/// Types that can be safely moved after being pinned.
///
@@ -839,17 +923,19 @@ pub struct PhantomPinned;
#[stable(feature = "pin", since = "1.33.0")]
impl !Unpin for PhantomPinned {}
-#[stable(feature = "pin", since = "1.33.0")]
-impl<'a, T: ?Sized + 'a> Unpin for &'a T {}
-
-#[stable(feature = "pin", since = "1.33.0")]
-impl<'a, T: ?Sized + 'a> Unpin for &'a mut T {}
-
-#[stable(feature = "pin_raw", since = "1.38.0")]
-impl<T: ?Sized> Unpin for *const T {}
+marker_impls! {
+ #[stable(feature = "pin", since = "1.33.0")]
+ Unpin for
+ {T: ?Sized} &T,
+ {T: ?Sized} &mut T,
+}
-#[stable(feature = "pin_raw", since = "1.38.0")]
-impl<T: ?Sized> Unpin for *mut T {}
+marker_impls! {
+ #[stable(feature = "pin_raw", since = "1.38.0")]
+ Unpin for
+ {T: ?Sized} *const T,
+ {T: ?Sized} *mut T,
+}
/// A marker for types that can be dropped.
///
@@ -858,8 +944,8 @@ impl<T: ?Sized> Unpin for *mut T {}
#[unstable(feature = "const_trait_impl", issue = "67792")]
#[lang = "destruct"]
#[rustc_on_unimplemented(message = "can't drop `{Self}`", append_const_msg)]
-#[const_trait]
#[rustc_deny_explicit_impl]
+#[const_trait]
pub trait Destruct {}
/// A marker for tuple types.
@@ -884,43 +970,33 @@ pub trait Tuple {}
)]
pub trait PointerLike {}
-/// Implementations of `Copy` for primitive types.
-///
-/// Implementations that cannot be described in Rust
-/// are implemented in `traits::SelectionContext::copy_clone_conditions()`
-/// in `rustc_trait_selection`.
-mod copy_impls {
-
- use super::Copy;
-
- macro_rules! impl_copy {
- ($($t:ty)*) => {
- $(
- #[stable(feature = "rust1", since = "1.0.0")]
- impl Copy for $t {}
- )*
- }
- }
-
- impl_copy! {
- usize u8 u16 u32 u64 u128
- isize i8 i16 i32 i64 i128
- f32 f64
- bool char
- }
-
- #[unstable(feature = "never_type", issue = "35121")]
- impl Copy for ! {}
-
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<T: ?Sized> Copy for *const T {}
+/// A marker for types which can be used as types of `const` generic parameters.
+#[cfg_attr(not(bootstrap), lang = "const_param_ty")]
+#[unstable(feature = "adt_const_params", issue = "95174")]
+#[rustc_on_unimplemented(message = "`{Self}` can't be used as a const parameter type")]
+pub trait ConstParamTy: StructuralEq {}
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<T: ?Sized> Copy for *mut T {}
+/// Derive macro generating an impl of the trait `ConstParamTy`.
+#[rustc_builtin_macro]
+#[unstable(feature = "adt_const_params", issue = "95174")]
+#[cfg(not(bootstrap))]
+pub macro ConstParamTy($item:item) {
+ /* compiler built-in */
+}
- /// Shared references can be copied, but mutable references *cannot*!
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<T: ?Sized> Copy for &T {}
+// FIXME(generic_const_parameter_types): handle `ty::FnDef`/`ty::Closure`
+// FIXME(generic_const_parameter_types): handle `ty::Tuple`
+marker_impls! {
+ #[unstable(feature = "adt_const_params", issue = "95174")]
+ ConstParamTy for
+ usize, u8, u16, u32, u64, u128,
+ isize, i8, i16, i32, i64, i128,
+ bool,
+ char,
+ str /* Technically requires `[u8]: ConstParamTy` */,
+ {T: ConstParamTy, const N: usize} [T; N],
+ {T: ConstParamTy} [T],
+ {T: ?Sized + ConstParamTy} &T,
}
/// A common trait implemented by all function pointers.
@@ -930,7 +1006,6 @@ mod copy_impls {
reason = "internal trait for implementing various traits for all function pointers"
)]
#[lang = "fn_ptr_trait"]
-#[cfg(not(bootstrap))]
#[rustc_deny_explicit_impl]
pub trait FnPtr: Copy + Clone {
/// Returns the address of the function pointer.
diff --git a/library/core/src/mem/manually_drop.rs b/library/core/src/mem/manually_drop.rs
index 3d719afe4..5f3d66e37 100644
--- a/library/core/src/mem/manually_drop.rs
+++ b/library/core/src/mem/manually_drop.rs
@@ -146,8 +146,7 @@ impl<T: ?Sized> ManuallyDrop<T> {
}
#[stable(feature = "manually_drop", since = "1.20.0")]
-#[rustc_const_unstable(feature = "const_deref", issue = "88955")]
-impl<T: ?Sized> const Deref for ManuallyDrop<T> {
+impl<T: ?Sized> Deref for ManuallyDrop<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
@@ -156,8 +155,7 @@ impl<T: ?Sized> const Deref for ManuallyDrop<T> {
}
#[stable(feature = "manually_drop", since = "1.20.0")]
-#[rustc_const_unstable(feature = "const_deref", issue = "88955")]
-impl<T: ?Sized> const DerefMut for ManuallyDrop<T> {
+impl<T: ?Sized> DerefMut for ManuallyDrop<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut T {
&mut self.value
diff --git a/library/core/src/mem/maybe_uninit.rs b/library/core/src/mem/maybe_uninit.rs
index 9c6d48675..d09a24b4b 100644
--- a/library/core/src/mem/maybe_uninit.rs
+++ b/library/core/src/mem/maybe_uninit.rs
@@ -945,14 +945,10 @@ impl<T> MaybeUninit<T> {
// * `MaybeUninit<T>` and T are guaranteed to have the same layout
// * `MaybeUninit` does not drop, so there are no double-frees
// And thus the conversion is safe
- let ret = unsafe {
+ unsafe {
intrinsics::assert_inhabited::<[T; N]>();
- (&array as *const _ as *const [T; N]).read()
- };
-
- // FIXME: required to avoid `~const Destruct` bound
- super::forget(array);
- ret
+ intrinsics::transmute_unchecked(array)
+ }
}
/// Assuming all the elements are initialized, get a slice to them.
@@ -1291,7 +1287,7 @@ impl<T, const N: usize> MaybeUninit<[T; N]> {
#[inline]
pub const fn transpose(self) -> [MaybeUninit<T>; N] {
// SAFETY: T and MaybeUninit<T> have the same layout
- unsafe { super::transmute_copy(&ManuallyDrop::new(self)) }
+ unsafe { intrinsics::transmute_unchecked(self) }
}
}
@@ -1311,6 +1307,6 @@ impl<T, const N: usize> [MaybeUninit<T>; N] {
#[inline]
pub const fn transpose(self) -> MaybeUninit<[T; N]> {
// SAFETY: T and MaybeUninit<T> have the same layout
- unsafe { super::transmute_copy(&ManuallyDrop::new(self)) }
+ unsafe { intrinsics::transmute_unchecked(self) }
}
}
diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs
index a67df7ed5..afbfd6d36 100644
--- a/library/core/src/mem/mod.rs
+++ b/library/core/src/mem/mod.rs
@@ -170,7 +170,7 @@ pub fn forget_unsized<T: ?Sized>(t: T) {
///
/// The following table gives the size for primitives.
///
-/// Type | size_of::\<Type>()
+/// Type | `size_of::<Type>()`
/// ---- | ---------------
/// () | 0
/// bool | 1
@@ -190,8 +190,8 @@ pub fn forget_unsized<T: ?Sized>(t: T) {
///
/// Furthermore, `usize` and `isize` have the same size.
///
-/// The types `*const T`, `&T`, `Box<T>`, `Option<&T>`, and `Option<Box<T>>` all have
-/// the same size. If `T` is Sized, all of those types have the same size as `usize`.
+/// The types [`*const T`], `&T`, [`Box<T>`], [`Option<&T>`], and `Option<Box<T>>` all have
+/// the same size. If `T` is `Sized`, all of those types have the same size as `usize`.
///
/// The mutability of a pointer does not change its size. As such, `&T` and `&mut T`
/// have the same size. Likewise for `*const T` and `*mut T`.
@@ -203,7 +203,7 @@ pub fn forget_unsized<T: ?Sized>(t: T) {
///
/// ## Size of Structs
///
-/// For `structs`, the size is determined by the following algorithm.
+/// For `struct`s, the size is determined by the following algorithm.
///
/// For each field in the struct ordered by declaration order:
///
@@ -299,6 +299,10 @@ pub fn forget_unsized<T: ?Sized>(t: T) {
/// ```
///
/// [alignment]: align_of
+/// [`*const T`]: primitive@pointer
+/// [`Box<T>`]: ../../std/boxed/struct.Box.html
+/// [`Option<&T>`]: crate::option::Option
+///
#[inline(always)]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
@@ -311,7 +315,7 @@ pub const fn size_of<T>() -> usize {
/// Returns the size of the pointed-to value in bytes.
///
-/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
+/// This is usually the same as [`size_of::<T>()`]. However, when `T` *has* no
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val` can be used to get the dynamically-known size.
///
@@ -328,6 +332,8 @@ pub const fn size_of<T>() -> usize {
/// let y: &[u8] = &x;
/// assert_eq!(13, mem::size_of_val(y));
/// ```
+///
+/// [`size_of::<T>()`]: size_of
#[inline]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
@@ -340,7 +346,7 @@ pub const fn size_of_val<T: ?Sized>(val: &T) -> usize {
/// Returns the size of the pointed-to value in bytes.
///
-/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
+/// This is usually the same as [`size_of::<T>()`]. However, when `T` *has* no
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val_raw` can be used to get the dynamically-known size.
///
@@ -363,6 +369,7 @@ pub const fn size_of_val<T: ?Sized>(val: &T) -> usize {
/// [`size_of_val`] on a reference to a type with an extern type tail.
/// - otherwise, it is conservatively not allowed to call this function.
///
+/// [`size_of::<T>()`]: size_of
/// [trait object]: ../../book/ch17-02-trait-objects.html
/// [extern type]: ../../unstable-book/language-features/extern-types.html
///
@@ -961,6 +968,7 @@ pub const fn replace<T>(dest: &mut T, src: T) -> T {
/// Integers and other types implementing [`Copy`] are unaffected by `drop`.
///
/// ```
+/// # #![cfg_attr(not(bootstrap), allow(dropping_copy_types))]
/// #[derive(Copy, Clone)]
/// struct Foo(u8);
///
@@ -1272,3 +1280,45 @@ pub trait SizedTypeProperties: Sized {
#[doc(hidden)]
#[unstable(feature = "sized_type_properties", issue = "none")]
impl<T> SizedTypeProperties for T {}
+
+/// Expands to the offset in bytes of a field from the beginning of the given type.
+///
+/// Only structs, unions and tuples are supported.
+///
+/// Nested field accesses may be used, but not array indexes like in `C`'s `offsetof`.
+///
+/// Note that the output of this macro is not stable, except for `#[repr(C)]` types.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(offset_of)]
+///
+/// use std::mem;
+/// #[repr(C)]
+/// struct FieldStruct {
+/// first: u8,
+/// second: u16,
+/// third: u8
+/// }
+///
+/// assert_eq!(mem::offset_of!(FieldStruct, first), 0);
+/// assert_eq!(mem::offset_of!(FieldStruct, second), 2);
+/// assert_eq!(mem::offset_of!(FieldStruct, third), 4);
+///
+/// #[repr(C)]
+/// struct NestedA {
+/// b: NestedB
+/// }
+///
+/// #[repr(C)]
+/// struct NestedB(u8);
+///
+/// assert_eq!(mem::offset_of!(NestedA, b.0), 0);
+/// ```
+#[cfg(not(bootstrap))]
+#[unstable(feature = "offset_of", issue = "106655")]
+#[allow_internal_unstable(builtin_syntax)]
+pub macro offset_of($Container:ty, $($fields:tt).+ $(,)?) {
+ builtin # offset_of($Container, $($fields).+)
+}
diff --git a/library/core/src/mem/transmutability.rs b/library/core/src/mem/transmutability.rs
index b53a330fa..87ae30619 100644
--- a/library/core/src/mem/transmutability.rs
+++ b/library/core/src/mem/transmutability.rs
@@ -81,8 +81,7 @@ impl Assume {
// FIXME(jswrenn): This const op is not actually usable. Why?
// https://github.com/rust-lang/rust/pull/100726#issuecomment-1219928926
#[unstable(feature = "transmutability", issue = "99571")]
-#[rustc_const_unstable(feature = "transmutability", issue = "99571")]
-impl const core::ops::Add for Assume {
+impl core::ops::Add for Assume {
type Output = Assume;
fn add(self, other_assumptions: Assume) -> Assume {
@@ -93,8 +92,7 @@ impl const core::ops::Add for Assume {
// FIXME(jswrenn): This const op is not actually usable. Why?
// https://github.com/rust-lang/rust/pull/100726#issuecomment-1219928926
#[unstable(feature = "transmutability", issue = "99571")]
-#[rustc_const_unstable(feature = "transmutability", issue = "99571")]
-impl const core::ops::Sub for Assume {
+impl core::ops::Sub for Assume {
type Output = Assume;
fn sub(self, other_assumptions: Assume) -> Assume {
diff --git a/library/core/src/net/socket_addr.rs b/library/core/src/net/socket_addr.rs
index 2d48e2715..8396aecf9 100644
--- a/library/core/src/net/socket_addr.rs
+++ b/library/core/src/net/socket_addr.rs
@@ -122,6 +122,7 @@ impl SocketAddr {
#[stable(feature = "ip_addr", since = "1.7.0")]
#[must_use]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn new(ip: IpAddr, port: u16) -> SocketAddr {
match ip {
IpAddr::V4(a) => SocketAddr::V4(SocketAddrV4::new(a, port)),
@@ -142,6 +143,7 @@ impl SocketAddr {
#[must_use]
#[stable(feature = "ip_addr", since = "1.7.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn ip(&self) -> IpAddr {
match *self {
SocketAddr::V4(ref a) => IpAddr::V4(*a.ip()),
@@ -161,6 +163,7 @@ impl SocketAddr {
/// assert_eq!(socket.ip(), IpAddr::V4(Ipv4Addr::new(10, 10, 0, 1)));
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_ip(&mut self, new_ip: IpAddr) {
// `match (*self, new_ip)` would have us mutate a copy of self only to throw it away.
match (self, new_ip) {
@@ -183,6 +186,7 @@ impl SocketAddr {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn port(&self) -> u16 {
match *self {
SocketAddr::V4(ref a) => a.port(),
@@ -202,6 +206,7 @@ impl SocketAddr {
/// assert_eq!(socket.port(), 1025);
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_port(&mut self, new_port: u16) {
match *self {
SocketAddr::V4(ref mut a) => a.set_port(new_port),
@@ -227,6 +232,7 @@ impl SocketAddr {
#[must_use]
#[stable(feature = "sockaddr_checker", since = "1.16.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn is_ipv4(&self) -> bool {
matches!(*self, SocketAddr::V4(_))
}
@@ -249,6 +255,7 @@ impl SocketAddr {
#[must_use]
#[stable(feature = "sockaddr_checker", since = "1.16.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn is_ipv6(&self) -> bool {
matches!(*self, SocketAddr::V6(_))
}
@@ -269,6 +276,7 @@ impl SocketAddrV4 {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn new(ip: Ipv4Addr, port: u16) -> SocketAddrV4 {
SocketAddrV4 { ip, port }
}
@@ -286,6 +294,7 @@ impl SocketAddrV4 {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn ip(&self) -> &Ipv4Addr {
&self.ip
}
@@ -302,6 +311,7 @@ impl SocketAddrV4 {
/// assert_eq!(socket.ip(), &Ipv4Addr::new(192, 168, 0, 1));
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_ip(&mut self, new_ip: Ipv4Addr) {
self.ip = new_ip;
}
@@ -319,6 +329,7 @@ impl SocketAddrV4 {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn port(&self) -> u16 {
self.port
}
@@ -335,6 +346,7 @@ impl SocketAddrV4 {
/// assert_eq!(socket.port(), 4242);
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_port(&mut self, new_port: u16) {
self.port = new_port;
}
@@ -360,6 +372,7 @@ impl SocketAddrV6 {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn new(ip: Ipv6Addr, port: u16, flowinfo: u32, scope_id: u32) -> SocketAddrV6 {
SocketAddrV6 { ip, port, flowinfo, scope_id }
}
@@ -377,6 +390,7 @@ impl SocketAddrV6 {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn ip(&self) -> &Ipv6Addr {
&self.ip
}
@@ -393,6 +407,7 @@ impl SocketAddrV6 {
/// assert_eq!(socket.ip(), &Ipv6Addr::new(76, 45, 0, 0, 0, 0, 0, 0));
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_ip(&mut self, new_ip: Ipv6Addr) {
self.ip = new_ip;
}
@@ -410,6 +425,7 @@ impl SocketAddrV6 {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn port(&self) -> u16 {
self.port
}
@@ -426,6 +442,7 @@ impl SocketAddrV6 {
/// assert_eq!(socket.port(), 4242);
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_port(&mut self, new_port: u16) {
self.port = new_port;
}
@@ -453,6 +470,7 @@ impl SocketAddrV6 {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn flowinfo(&self) -> u32 {
self.flowinfo
}
@@ -471,6 +489,7 @@ impl SocketAddrV6 {
/// assert_eq!(socket.flowinfo(), 56);
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_flowinfo(&mut self, new_flowinfo: u32) {
self.flowinfo = new_flowinfo;
}
@@ -493,6 +512,7 @@ impl SocketAddrV6 {
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_socketaddr", since = "1.69.0")]
+ #[inline]
pub const fn scope_id(&self) -> u32 {
self.scope_id
}
@@ -511,6 +531,7 @@ impl SocketAddrV6 {
/// assert_eq!(socket.scope_id(), 42);
/// ```
#[stable(feature = "sockaddr_setters", since = "1.9.0")]
+ #[inline]
pub fn set_scope_id(&mut self, new_scope_id: u32) {
self.scope_id = new_scope_id;
}
@@ -519,6 +540,7 @@ impl SocketAddrV6 {
#[stable(feature = "ip_from_ip", since = "1.16.0")]
impl From<SocketAddrV4> for SocketAddr {
/// Converts a [`SocketAddrV4`] into a [`SocketAddr::V4`].
+ #[inline]
fn from(sock4: SocketAddrV4) -> SocketAddr {
SocketAddr::V4(sock4)
}
@@ -527,6 +549,7 @@ impl From<SocketAddrV4> for SocketAddr {
#[stable(feature = "ip_from_ip", since = "1.16.0")]
impl From<SocketAddrV6> for SocketAddr {
/// Converts a [`SocketAddrV6`] into a [`SocketAddr::V6`].
+ #[inline]
fn from(sock6: SocketAddrV6) -> SocketAddr {
SocketAddr::V6(sock6)
}
@@ -624,6 +647,7 @@ impl fmt::Debug for SocketAddrV6 {
#[stable(feature = "socketaddr_ordering", since = "1.45.0")]
impl PartialOrd for SocketAddrV4 {
+ #[inline]
fn partial_cmp(&self, other: &SocketAddrV4) -> Option<Ordering> {
Some(self.cmp(other))
}
@@ -631,6 +655,7 @@ impl PartialOrd for SocketAddrV4 {
#[stable(feature = "socketaddr_ordering", since = "1.45.0")]
impl PartialOrd for SocketAddrV6 {
+ #[inline]
fn partial_cmp(&self, other: &SocketAddrV6) -> Option<Ordering> {
Some(self.cmp(other))
}
@@ -638,6 +663,7 @@ impl PartialOrd for SocketAddrV6 {
#[stable(feature = "socketaddr_ordering", since = "1.45.0")]
impl Ord for SocketAddrV4 {
+ #[inline]
fn cmp(&self, other: &SocketAddrV4) -> Ordering {
self.ip().cmp(other.ip()).then(self.port().cmp(&other.port()))
}
@@ -645,6 +671,7 @@ impl Ord for SocketAddrV4 {
#[stable(feature = "socketaddr_ordering", since = "1.45.0")]
impl Ord for SocketAddrV6 {
+ #[inline]
fn cmp(&self, other: &SocketAddrV6) -> Ordering {
self.ip().cmp(other.ip()).then(self.port().cmp(&other.port()))
}
diff --git a/library/core/src/num/error.rs b/library/core/src/num/error.rs
index 1bae4efe7..14e99578a 100644
--- a/library/core/src/num/error.rs
+++ b/library/core/src/num/error.rs
@@ -26,15 +26,15 @@ impl Error for TryFromIntError {
}
#[stable(feature = "try_from", since = "1.34.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<Infallible> for TryFromIntError {
+impl From<Infallible> for TryFromIntError {
fn from(x: Infallible) -> TryFromIntError {
match x {}
}
}
#[unstable(feature = "never_type", issue = "35121")]
-impl const From<!> for TryFromIntError {
+impl From<!> for TryFromIntError {
+ #[inline]
fn from(never: !) -> TryFromIntError {
// Match rather than coerce to make sure that code like
// `From<Infallible> for TryFromIntError` above will keep working
diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs
index 1c6819b54..4a035ad61 100644
--- a/library/core/src/num/f32.rs
+++ b/library/core/src/num/f32.rs
@@ -940,6 +940,42 @@ impl f32 {
}
}
+ /// Calculates the middle point of `self` and `other`.
+ ///
+ /// This returns NaN when *either* argument is NaN, or when one argument
+ /// is +inf and the other is -inf.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(num_midpoint)]
+ /// assert_eq!(1f32.midpoint(4.0), 2.5);
+ /// assert_eq!((-5.5f32).midpoint(8.0), 1.25);
+ /// ```
+ #[unstable(feature = "num_midpoint", issue = "110840")]
+ pub fn midpoint(self, other: f32) -> f32 {
+ const LO: f32 = f32::MIN_POSITIVE * 2.;
+ const HI: f32 = f32::MAX / 2.;
+
+ let (a, b) = (self, other);
+ let abs_a = a.abs_private();
+ let abs_b = b.abs_private();
+
+ if abs_a <= HI && abs_b <= HI {
+ // Overflow is impossible
+ (a + b) / 2.
+ } else if abs_a < LO {
+ // Not safe to halve a
+ a + (b / 2.)
+ } else if abs_b < LO {
+ // Not safe to halve b
+ (a / 2.) + b
+ } else {
+ // Not safe to halve a and b
+ (a / 2.) + (b / 2.)
+ }
+ }
+
/// Rounds toward zero and converts to any primitive integer type,
/// assuming that the value is finite and fits in that type.
///
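A sketch of why the branches above are needed (nightly-only, since `midpoint` is gated behind the unstable `num_midpoint` feature):

```rust
#![feature(num_midpoint)]

fn main() {
    // The naive formula overflows to infinity at the extremes...
    assert!(((f32::MAX + f32::MAX) / 2.0).is_infinite());

    // ...while the `abs > HI` branches halve each operand first.
    assert_eq!(f32::MAX.midpoint(f32::MAX), f32::MAX);
    assert_eq!(f32::MAX.midpoint(f32::MIN), 0.0);

    // NaN is contagious, and +inf with -inf has no meaningful midpoint.
    assert!(f32::NAN.midpoint(1.0).is_nan());
    assert!(f32::INFINITY.midpoint(f32::NEG_INFINITY).is_nan());
}
```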
diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs
index 1e7387217..3aafc435f 100644
--- a/library/core/src/num/f64.rs
+++ b/library/core/src/num/f64.rs
@@ -951,6 +951,42 @@ impl f64 {
}
}
+ /// Calculates the middle point of `self` and `other`.
+ ///
+ /// This returns NaN when *either* argument is NaN, or when one argument
+ /// is +inf and the other is -inf.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(num_midpoint)]
+ /// assert_eq!(1f64.midpoint(4.0), 2.5);
+ /// assert_eq!((-5.5f64).midpoint(8.0), 1.25);
+ /// ```
+ #[unstable(feature = "num_midpoint", issue = "110840")]
+ pub fn midpoint(self, other: f64) -> f64 {
+ const LO: f64 = f64::MIN_POSITIVE * 2.;
+ const HI: f64 = f64::MAX / 2.;
+
+ let (a, b) = (self, other);
+ let abs_a = a.abs_private();
+ let abs_b = b.abs_private();
+
+ if abs_a <= HI && abs_b <= HI {
+ // Overflow is impossible
+ (a + b) / 2.
+ } else if abs_a < LO {
+ // Not safe to halve a
+ a + (b / 2.)
+ } else if abs_b < LO {
+ // Not safe to halve b
+ (a / 2.) + b
+ } else {
+ // Not safe to halve a and b
+ (a / 2.) + (b / 2.)
+ }
+ }
+
/// Rounds toward zero and converts to any primitive integer type,
/// assuming that the value is finite and fits in that type.
///
diff --git a/library/core/src/num/flt2dec/strategy/grisu.rs b/library/core/src/num/flt2dec/strategy/grisu.rs
index ed3e0edaf..b9f0d114c 100644
--- a/library/core/src/num/flt2dec/strategy/grisu.rs
+++ b/library/core/src/num/flt2dec/strategy/grisu.rs
@@ -487,6 +487,22 @@ pub fn format_exact_opt<'a>(
let vint = (v.f >> e) as u32;
let vfrac = v.f & ((1 << e) - 1);
+ let requested_digits = buf.len();
+
+ const POW10_UP_TO_9: [u32; 10] =
+ [1, 10, 100, 1000, 10_000, 100_000, 1_000_000, 10_000_000, 100_000_000, 1_000_000_000];
+
+ // We deviate from the original algorithm here and do some early checks to determine if we can satisfy requested_digits.
+ // If we determine that we can't, we exit early and avoid most of the heavy lifting that the algorithm otherwise does.
+ //
+ // When vfrac is zero, we can easily determine if vint can satisfy requested digits:
+ // If requested_digits >= 11, vint cannot exhaust the count by itself, since 10^(11-1) > u32::MAX >= vint.
+ // If vint < 10^(requested_digits - 1), vint cannot exhaust the count.
+ // Otherwise, vint might be able to exhaust the count and we need to execute the rest of the code.
+ if (vfrac == 0) && ((requested_digits >= 11) || (vint < POW10_UP_TO_9[requested_digits - 1])) {
+ return None;
+ }
+
// both the old `v` and the new `v` (scaled by `10^-k`) have an error of < 1 ulp (Theorem 5.1).
// as we don't know whether the error is positive or negative, we use two approximations
// spaced equally and have the maximal error of 2 ulps (same as in the shortest case).
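A standalone restatement of the early-exit predicate added above (hypothetical helper, shown only to make the digit-count reasoning concrete):

```rust
// With no fractional part, `vint` can only supply `requested_digits`
// significant digits if it has at least that many digits itself.
fn bails_out_early(vint: u32, vfrac: u64, requested_digits: usize) -> bool {
    const POW10_UP_TO_9: [u32; 10] =
        [1, 10, 100, 1000, 10_000, 100_000, 1_000_000, 10_000_000, 100_000_000, 1_000_000_000];
    vfrac == 0 && (requested_digits >= 11 || vint < POW10_UP_TO_9[requested_digits - 1])
}

fn main() {
    assert!(bails_out_early(42, 0, 3));  // 42 < 100: only two digits available
    assert!(!bails_out_early(42, 0, 2)); // 42 >= 10: two digits are possible
    assert!(bails_out_early(7, 0, 11));  // a u32 can never carry 11 digits
}
```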
diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs
index aec15212d..1199d09b5 100644
--- a/library/core/src/num/int_macros.rs
+++ b/library/core/src/num/int_macros.rs
@@ -785,7 +785,7 @@ macro_rules! int_impl {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shl`.
// Any legal shift amount is losslessly representable in the self type.
- unsafe { intrinsics::unchecked_shl(self, rhs.try_into().ok().unwrap_unchecked()) }
+ unsafe { intrinsics::unchecked_shl(self, conv_rhs_for_unchecked_shift!($SelfT, rhs)) }
}
/// Checked shift right. Computes `self >> rhs`, returning `None` if `rhs` is
@@ -833,7 +833,7 @@ macro_rules! int_impl {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shr`.
// Any legal shift amount is losslessly representable in the self type.
- unsafe { intrinsics::unchecked_shr(self, rhs.try_into().ok().unwrap_unchecked()) }
+ unsafe { intrinsics::unchecked_shr(self, conv_rhs_for_unchecked_shift!($SelfT, rhs)) }
}
/// Checked absolute value. Computes `self.abs()`, returning `None` if
@@ -2332,6 +2332,44 @@ macro_rules! int_impl {
}
}
+ /// Calculates the middle point of `self` and `rhs`.
+ ///
+ /// `midpoint(a, b)` is `(a + b) >> 1` as if it were performed in a
+ /// sufficiently-large signed integral type. This implies that the result is
+ /// always rounded towards negative infinity and that no overflow will ever occur.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(num_midpoint)]
+ #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(4), 2);")]
+ #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(-1), -1);")]
+ #[doc = concat!("assert_eq!((-1", stringify!($SelfT), ").midpoint(0), -1);")]
+ /// ```
+ #[unstable(feature = "num_midpoint", issue = "110840")]
+ #[rustc_const_unstable(feature = "const_num_midpoint", issue = "110840")]
+ #[rustc_allow_const_fn_unstable(const_num_midpoint)]
+ #[must_use = "this returns the result of the operation, \
+ without modifying the original"]
+ #[inline]
+ pub const fn midpoint(self, rhs: Self) -> Self {
+ const U: $UnsignedT = <$SelfT>::MIN.unsigned_abs();
+
+ // Map an $SelfT to an $UnsignedT
+ // ex: i8 [-128; 127] to [0; 255]
+ const fn map(a: $SelfT) -> $UnsignedT {
+ (a as $UnsignedT) ^ U
+ }
+
+ // Map an $UnsignedT to an $SelfT
+ // ex: u8 [0; 255] to [-128; 127]
+ const fn demap(a: $UnsignedT) -> $SelfT {
+ (a ^ U) as $SelfT
+ }
+
+ demap(<$UnsignedT>::midpoint(map(self), map(rhs)))
+ }
+
/// Returns the logarithm of the number with respect to an arbitrary base,
/// rounded down.
///
@@ -2603,13 +2641,16 @@ macro_rules! int_impl {
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline(always)]
- #[rustc_allow_const_fn_unstable(const_cmp)]
pub const fn signum(self) -> Self {
// Picking the right way to phrase this is complicated
// (<https://graphics.stanford.edu/~seander/bithacks.html#CopyIntegerSign>)
// so delegate it to `Ord` which is already producing -1/0/+1
// exactly like we need and can be the place to deal with the complexity.
- self.cmp(&0) as _
+
+ // FIXME(const-hack): replace with cmp
+ if self < 0 { -1 }
+ else if self == 0 { 0 }
+ else { 1 }
}
/// Returns `true` if `self` is positive and `false` if the number is zero or
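A standalone re-implementation of the sign-bit mapping used by the signed `midpoint` added above, specialized to `i8` for illustration (the widening average stands in for the unsigned `midpoint`):

```rust
// XOR-ing with 0x80 flips the sign bit, turning the signed order
// [-128, 127] into the unsigned order [0, 255], so the unsigned
// midpoint can do the real work.
fn map(a: i8) -> u8 {
    (a as u8) ^ 0x80
}
fn demap(a: u8) -> i8 {
    (a ^ 0x80) as i8
}
fn midpoint_i8(a: i8, b: i8) -> i8 {
    // Same rounding as `u8::midpoint`, written with a widening add here.
    demap(((map(a) as u16 + map(b) as u16) / 2) as u8)
}

fn main() {
    assert_eq!(map(i8::MIN), 0);
    assert_eq!(map(i8::MAX), 255);
    assert_eq!(midpoint_i8(0, 4), 2);
    assert_eq!(midpoint_i8(-1, 0), -1); // rounds towards negative infinity
    assert_eq!(midpoint_i8(i8::MIN, i8::MAX), -1); // no intermediate overflow
}
```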
diff --git a/library/core/src/num/mod.rs b/library/core/src/num/mod.rs
index 9b812bbfc..c9baa09f4 100644
--- a/library/core/src/num/mod.rs
+++ b/library/core/src/num/mod.rs
@@ -95,6 +95,57 @@ depending on the target pointer size.
};
}
+macro_rules! midpoint_impl {
+ ($SelfT:ty, unsigned) => {
+ /// Calculates the middle point of `self` and `rhs`.
+ ///
+ /// `midpoint(a, b)` is `(a + b) >> 1` as if it were performed in a
+ /// sufficiently-large signed integral type. This implies that the result is
+ /// always rounded towards negative infinity and that no overflow will ever occur.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(num_midpoint)]
+ #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(4), 2);")]
+ #[doc = concat!("assert_eq!(1", stringify!($SelfT), ".midpoint(4), 2);")]
+ /// ```
+ #[unstable(feature = "num_midpoint", issue = "110840")]
+ #[rustc_const_unstable(feature = "const_num_midpoint", issue = "110840")]
+ #[must_use = "this returns the result of the operation, \
+ without modifying the original"]
+ #[inline]
+ pub const fn midpoint(self, rhs: $SelfT) -> $SelfT {
+            // Use the well-known branchless algorithm from Hacker's Delight to compute
+ // `(a + b) / 2` without overflowing: `((a ^ b) >> 1) + (a & b)`.
+ ((self ^ rhs) >> 1) + (self & rhs)
+ }
+ };
+ ($SelfT:ty, $WideT:ty, unsigned) => {
+ /// Calculates the middle point of `self` and `rhs`.
+ ///
+ /// `midpoint(a, b)` is `(a + b) >> 1` as if it were performed in a
+ /// sufficiently-large signed integral type. This implies that the result is
+ /// always rounded towards negative infinity and that no overflow will ever occur.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(num_midpoint)]
+ #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".midpoint(4), 2);")]
+ #[doc = concat!("assert_eq!(1", stringify!($SelfT), ".midpoint(4), 2);")]
+ /// ```
+ #[unstable(feature = "num_midpoint", issue = "110840")]
+ #[rustc_const_unstable(feature = "const_num_midpoint", issue = "110840")]
+ #[must_use = "this returns the result of the operation, \
+ without modifying the original"]
+ #[inline]
+ pub const fn midpoint(self, rhs: $SelfT) -> $SelfT {
+ ((self as $WideT + rhs as $WideT) / 2) as $SelfT
+ }
+ };
+}
+
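The unsigned branchless form relies on the identity `a + b == (a ^ b) + 2 * (a & b)`: XOR adds without carries, AND collects the carry bits, so halving each term separately can never overflow. A small self-contained check against a wider-type reference (plain stable Rust, nothing from the patch):

```rust
fn midpoint_u8(a: u8, b: u8) -> u8 {
    ((a ^ b) >> 1) + (a & b)
}

fn main() {
    assert_eq!(midpoint_u8(0, 4), 2);
    assert_eq!(midpoint_u8(1, 4), 2);                   // rounds down
    assert_eq!(midpoint_u8(u8::MAX, u8::MAX), u8::MAX); // no overflow
    // Exhaustive comparison with the widening formulation used for the
    // smaller types in the macro above.
    for a in 0..=u8::MAX {
        for b in 0..=u8::MAX {
            assert_eq!(midpoint_u8(a, b), ((a as u16 + b as u16) / 2) as u8);
        }
    }
}
```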
macro_rules! widening_impl {
($SelfT:ty, $WideT:ty, $BITS:literal, unsigned) => {
/// Calculates the complete product `self * rhs` without the possibility to overflow.
@@ -225,6 +276,23 @@ macro_rules! widening_impl {
};
}
+macro_rules! conv_rhs_for_unchecked_shift {
+ ($SelfT:ty, $x:expr) => {{
+ #[inline]
+ fn conv(x: u32) -> $SelfT {
+ // FIXME(const-hack) replace with `.try_into().ok().unwrap_unchecked()`.
+ // SAFETY: Any legal shift amount must be losslessly representable in the self type.
+ unsafe { x.try_into().ok().unwrap_unchecked() }
+ }
+ #[inline]
+ const fn const_conv(x: u32) -> $SelfT {
+ x as _
+ }
+
+ intrinsics::const_eval_select(($x,), const_conv, conv)
+ }};
+}
+
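Rough sketch of the two conversion paths the macro above selects between, specialized to `u8` and simplified so it runs on stable (Rust 2021, so `TryInto` is in the prelude). The real macro dispatches through the internal `const_eval_select` intrinsic and uses an unchecked unwrap under the caller's safety contract; this stand-in uses a checked `expect` instead:

```rust
// Runtime path stand-in: a checked narrowing conversion. For any legal shift
// amount (x < 8 for u8) the conversion cannot fail.
fn conv_runtime(x: u32) -> u8 {
    x.try_into().expect("legal shift amounts fit in the target type")
}

// Const path: a plain `as` cast, valid under the same precondition.
const fn conv_const(x: u32) -> u8 {
    x as u8
}

fn main() {
    for x in 0u32..8 {
        assert_eq!(conv_runtime(x), conv_const(x));
    }
}
```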
impl i8 {
int_impl! {
Self = i8,
@@ -438,6 +506,7 @@ impl u8 {
bound_condition = "",
}
widening_impl! { u8, u16, 8, unsigned }
+ midpoint_impl! { u8, u16, unsigned }
/// Checks if the value is within the ASCII range.
///
@@ -455,7 +524,16 @@ impl u8 {
#[rustc_const_stable(feature = "const_u8_is_ascii", since = "1.43.0")]
#[inline]
pub const fn is_ascii(&self) -> bool {
- *self & 128 == 0
+ *self <= 127
+ }
+
+ /// If the value of this byte is within the ASCII range, returns it as an
+ /// [ASCII character](ascii::Char). Otherwise, returns `None`.
+ #[must_use]
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[inline]
+ pub const fn as_ascii(&self) -> Option<ascii::Char> {
+ ascii::Char::from_u8(*self)
}
/// Makes a copy of the value in its ASCII upper case equivalent.
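The `is_ascii` change swaps the bitmask test for an equivalent comparison; a quick exhaustive check over all byte values (the nightly-only `as_ascii`/`ascii::Char` addition is not exercised here):

```rust
fn main() {
    for b in 0..=u8::MAX {
        assert_eq!(b.is_ascii(), b <= 127);
        assert_eq!(b.is_ascii(), b & 128 == 0); // the old formulation
    }
    assert!(b'A'.is_ascii());
    assert!(!0xFF_u8.is_ascii());
}
```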
@@ -1040,6 +1118,7 @@ impl u16 {
bound_condition = "",
}
widening_impl! { u16, u32, 16, unsigned }
+ midpoint_impl! { u16, u32, unsigned }
/// Checks if the value is a Unicode surrogate code point, which are disallowed values for [`char`].
///
@@ -1088,6 +1167,7 @@ impl u32 {
bound_condition = "",
}
widening_impl! { u32, u64, 32, unsigned }
+ midpoint_impl! { u32, u64, unsigned }
}
impl u64 {
@@ -1111,6 +1191,7 @@ impl u64 {
bound_condition = "",
}
widening_impl! { u64, u128, 64, unsigned }
+ midpoint_impl! { u64, u128, unsigned }
}
impl u128 {
@@ -1135,6 +1216,7 @@ impl u128 {
from_xe_bytes_doc = "",
bound_condition = "",
}
+ midpoint_impl! { u128, unsigned }
}
#[cfg(target_pointer_width = "16")]
@@ -1159,6 +1241,7 @@ impl usize {
bound_condition = " on 16-bit targets",
}
widening_impl! { usize, u32, 16, unsigned }
+ midpoint_impl! { usize, u32, unsigned }
}
#[cfg(target_pointer_width = "32")]
@@ -1183,6 +1266,7 @@ impl usize {
bound_condition = " on 32-bit targets",
}
widening_impl! { usize, u64, 32, unsigned }
+ midpoint_impl! { usize, u64, unsigned }
}
#[cfg(target_pointer_width = "64")]
@@ -1207,6 +1291,7 @@ impl usize {
bound_condition = " on 64-bit targets",
}
widening_impl! { usize, u128, 64, unsigned }
+ midpoint_impl! { usize, u128, unsigned }
}
impl usize {
diff --git a/library/core/src/num/nonzero.rs b/library/core/src/num/nonzero.rs
index 49d23abee..7f06e170a 100644
--- a/library/core/src/num/nonzero.rs
+++ b/library/core/src/num/nonzero.rs
@@ -1,7 +1,7 @@
//! Definitions of integer types that are known not to equal zero.
use crate::fmt;
-use crate::ops::{BitOr, BitOrAssign, Div, Rem};
+use crate::ops::{BitOr, BitOrAssign, Div, Neg, Rem};
use crate::str::FromStr;
use super::from_str_radix;
@@ -96,8 +96,7 @@ macro_rules! nonzero_integers {
}
#[stable(feature = "from_nonzero", since = "1.31.0")]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const From<$Ty> for $Int {
+ impl From<$Ty> for $Int {
#[doc = concat!("Converts a `", stringify!($Ty), "` into an `", stringify!($Int), "`")]
#[inline]
fn from(nonzero: $Ty) -> Self {
@@ -106,8 +105,7 @@ macro_rules! nonzero_integers {
}
#[stable(feature = "nonzero_bitor", since = "1.45.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOr for $Ty {
+ impl BitOr for $Ty {
type Output = Self;
#[inline]
fn bitor(self, rhs: Self) -> Self::Output {
@@ -118,8 +116,7 @@ macro_rules! nonzero_integers {
}
#[stable(feature = "nonzero_bitor", since = "1.45.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOr<$Int> for $Ty {
+ impl BitOr<$Int> for $Ty {
type Output = Self;
#[inline]
fn bitor(self, rhs: $Int) -> Self::Output {
@@ -131,8 +128,7 @@ macro_rules! nonzero_integers {
}
#[stable(feature = "nonzero_bitor", since = "1.45.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOr<$Ty> for $Int {
+ impl BitOr<$Ty> for $Int {
type Output = $Ty;
#[inline]
fn bitor(self, rhs: $Ty) -> Self::Output {
@@ -144,8 +140,7 @@ macro_rules! nonzero_integers {
}
#[stable(feature = "nonzero_bitor", since = "1.45.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOrAssign for $Ty {
+ impl BitOrAssign for $Ty {
#[inline]
fn bitor_assign(&mut self, rhs: Self) {
*self = *self | rhs;
@@ -153,8 +148,7 @@ macro_rules! nonzero_integers {
}
#[stable(feature = "nonzero_bitor", since = "1.45.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOrAssign<$Int> for $Ty {
+ impl BitOrAssign<$Int> for $Ty {
#[inline]
fn bitor_assign(&mut self, rhs: $Int) {
*self = *self | rhs;
@@ -276,8 +270,7 @@ macro_rules! nonzero_integers_div {
( $( $Ty: ident($Int: ty); )+ ) => {
$(
#[stable(feature = "nonzero_div", since = "1.51.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Div<$Ty> for $Int {
+ impl Div<$Ty> for $Int {
type Output = $Int;
/// This operation rounds towards zero,
/// truncating any fractional part of the exact result, and cannot panic.
@@ -290,8 +283,7 @@ macro_rules! nonzero_integers_div {
}
#[stable(feature = "nonzero_div", since = "1.51.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Rem<$Ty> for $Int {
+ impl Rem<$Ty> for $Int {
type Output = $Int;
/// This operation satisfies `n % d == n - (n / d) * d`, and cannot panic.
#[inline]
@@ -501,6 +493,43 @@ macro_rules! nonzero_unsigned_operations {
pub const fn ilog10(self) -> u32 {
super::int_log10::$Int(self.0)
}
+
+ /// Calculates the middle point of `self` and `rhs`.
+ ///
+ /// `midpoint(a, b)` is `(a + b) >> 1` as if it were performed in a
+ /// sufficiently-large signed integral type. This implies that the result is
+ /// always rounded towards negative infinity and that no overflow will ever occur.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(num_midpoint)]
+ #[doc = concat!("# use std::num::", stringify!($Ty), ";")]
+ ///
+ /// # fn main() { test().unwrap(); }
+ /// # fn test() -> Option<()> {
+ #[doc = concat!("let one = ", stringify!($Ty), "::new(1)?;")]
+ #[doc = concat!("let two = ", stringify!($Ty), "::new(2)?;")]
+ #[doc = concat!("let four = ", stringify!($Ty), "::new(4)?;")]
+ ///
+ /// assert_eq!(one.midpoint(four), two);
+ /// assert_eq!(four.midpoint(one), two);
+ /// # Some(())
+ /// # }
+ /// ```
+ #[unstable(feature = "num_midpoint", issue = "110840")]
+ #[rustc_const_unstable(feature = "const_num_midpoint", issue = "110840")]
+ #[rustc_allow_const_fn_unstable(const_num_midpoint)]
+ #[must_use = "this returns the result of the operation, \
+ without modifying the original"]
+ #[inline]
+ pub const fn midpoint(self, rhs: Self) -> Self {
+            // SAFETY: The only way to get `0` with midpoint is to have two opposite or
+            // near-opposite numbers: (-5, 5), (0, 1), (0, 0), which is impossible here
+            // because this type is unsigned and because $Ty is guaranteed to never be 0.
+ unsafe { $Ty::new_unchecked(self.get().midpoint(rhs.get())) }
+ }
}
)+
}
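Why the `new_unchecked` in the NonZero `midpoint` above is sound for unsigned types: `midpoint(a, b) >= min(a, b) >= 1` whenever both inputs are nonzero, so the result cannot be zero. A brute-force check of that claim using a plain `u8` reference implementation (the unstable NonZero `midpoint` itself is not called here):

```rust
fn midpoint_u8(a: u8, b: u8) -> u8 {
    ((a as u16 + b as u16) / 2) as u8
}

fn main() {
    for a in 1..=u8::MAX {
        for b in 1..=u8::MAX {
            // Nonzero inputs always give a nonzero midpoint.
            assert_ne!(midpoint_u8(a, b), 0);
        }
    }
}
```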
@@ -672,8 +701,7 @@ macro_rules! nonzero_signed_operations {
/// assert_eq!(pos, pos.wrapping_abs());
/// assert_eq!(pos, neg.wrapping_abs());
/// assert_eq!(min, min.wrapping_abs());
- /// # // FIXME: add once Neg is implemented?
- /// # // assert_eq!(max, (-max).wrapping_abs());
+ /// assert_eq!(max, (-max).wrapping_abs());
/// # Some(())
/// # }
/// ```
@@ -722,14 +750,37 @@ macro_rules! nonzero_signed_operations {
unsafe { $Uty::new_unchecked(self.get().unsigned_abs()) }
}
+ /// Returns `true` if `self` is positive and `false` if the
+ /// number is negative.
+ ///
+ /// # Example
+ ///
+ /// ```
+ #[doc = concat!("# use std::num::", stringify!($Ty), ";")]
+ /// # fn main() { test().unwrap(); }
+ /// # fn test() -> Option<()> {
+ #[doc = concat!("let pos_five = ", stringify!($Ty), "::new(5)?;")]
+ #[doc = concat!("let neg_five = ", stringify!($Ty), "::new(-5)?;")]
+ ///
+ /// assert!(pos_five.is_positive());
+ /// assert!(!neg_five.is_positive());
+ /// # Some(())
+ /// # }
+ /// ```
+ #[must_use]
+ #[inline]
+ #[stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ #[rustc_const_stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ pub const fn is_positive(self) -> bool {
+ self.get().is_positive()
+ }
+
/// Returns `true` if `self` is negative and `false` if the
/// number is positive.
///
/// # Example
///
/// ```
- /// #![feature(nonzero_negation_ops)]
- ///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
@@ -743,7 +794,8 @@ macro_rules! nonzero_signed_operations {
/// ```
#[must_use]
#[inline]
- #[unstable(feature = "nonzero_negation_ops", issue = "102443")]
+ #[stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ #[rustc_const_stable(feature = "nonzero_negation_ops", since = "1.71.0")]
pub const fn is_negative(self) -> bool {
self.get().is_negative()
}
@@ -753,8 +805,6 @@ macro_rules! nonzero_signed_operations {
/// # Example
///
/// ```
- /// #![feature(nonzero_negation_ops)]
- ///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
@@ -769,7 +819,8 @@ macro_rules! nonzero_signed_operations {
/// # }
/// ```
#[inline]
- #[unstable(feature = "nonzero_negation_ops", issue = "102443")]
+ #[stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ #[rustc_const_stable(feature = "nonzero_negation_ops", since = "1.71.0")]
pub const fn checked_neg(self) -> Option<$Ty> {
if let Some(result) = self.get().checked_neg() {
// SAFETY: negation of nonzero cannot yield zero values.
@@ -786,8 +837,6 @@ macro_rules! nonzero_signed_operations {
/// # Example
///
/// ```
- /// #![feature(nonzero_negation_ops)]
- ///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
@@ -802,7 +851,8 @@ macro_rules! nonzero_signed_operations {
/// # }
/// ```
#[inline]
- #[unstable(feature = "nonzero_negation_ops", issue = "102443")]
+ #[stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ #[rustc_const_stable(feature = "nonzero_negation_ops", since = "1.71.0")]
pub const fn overflowing_neg(self) -> ($Ty, bool) {
let (result, overflow) = self.get().overflowing_neg();
// SAFETY: negation of nonzero cannot yield zero values.
@@ -815,8 +865,6 @@ macro_rules! nonzero_signed_operations {
/// # Example
///
/// ```
- /// #![feature(nonzero_negation_ops)]
- ///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
@@ -836,7 +884,8 @@ macro_rules! nonzero_signed_operations {
/// # }
/// ```
#[inline]
- #[unstable(feature = "nonzero_negation_ops", issue = "102443")]
+ #[stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ #[rustc_const_stable(feature = "nonzero_negation_ops", since = "1.71.0")]
pub const fn saturating_neg(self) -> $Ty {
if let Some(result) = self.checked_neg() {
return result;
@@ -853,8 +902,6 @@ macro_rules! nonzero_signed_operations {
/// # Example
///
/// ```
- /// #![feature(nonzero_negation_ops)]
- ///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
@@ -869,13 +916,28 @@ macro_rules! nonzero_signed_operations {
/// # }
/// ```
#[inline]
- #[unstable(feature = "nonzero_negation_ops", issue = "102443")]
+ #[stable(feature = "nonzero_negation_ops", since = "1.71.0")]
+ #[rustc_const_stable(feature = "nonzero_negation_ops", since = "1.71.0")]
pub const fn wrapping_neg(self) -> $Ty {
let result = self.get().wrapping_neg();
// SAFETY: negation of nonzero cannot yield zero values.
unsafe { $Ty::new_unchecked(result) }
}
}
+
+ #[stable(feature = "signed_nonzero_neg", since = "1.71.0")]
+ impl Neg for $Ty {
+ type Output = $Ty;
+
+ #[inline]
+ fn neg(self) -> $Ty {
+ // SAFETY: negation of nonzero cannot yield zero values.
+ unsafe { $Ty::new_unchecked(self.get().neg()) }
+ }
+ }
+
+ forward_ref_unop! { impl Neg, neg for $Ty,
+ #[stable(feature = "signed_nonzero_neg", since = "1.71.0")] }
)+
}
}
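Usage sketch for the new `Neg` impl on the signed NonZero types, assuming a toolchain where `signed_nonzero_neg` has landed (1.71 per the attributes above); this is also what lets the `(-max).wrapping_abs()` doc line earlier in this diff compile:

```rust
use std::num::NonZeroI32;

fn main() {
    let pos = NonZeroI32::new(5).unwrap();
    let neg = -pos; // `Neg` now applies directly to the NonZero wrapper
    assert_eq!(neg.get(), -5);
    assert_eq!((-&neg).get(), 5); // forward_ref_unop! also covers &NonZeroI32

    // Soundness note: negating a nonzero i32 can never yield 0. The only edge
    // case, i32::MIN, overflows (panic in debug, wraps back to i32::MIN in
    // release) rather than producing zero.
}
```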
diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs
index 114deeea3..6f6b6dbb8 100644
--- a/library/core/src/num/uint_macros.rs
+++ b/library/core/src/num/uint_macros.rs
@@ -939,7 +939,7 @@ macro_rules! uint_impl {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shl`.
// Any legal shift amount is losslessly representable in the self type.
- unsafe { intrinsics::unchecked_shl(self, rhs.try_into().ok().unwrap_unchecked()) }
+ unsafe { intrinsics::unchecked_shl(self, conv_rhs_for_unchecked_shift!($SelfT, rhs)) }
}
/// Checked shift right. Computes `self >> rhs`, returning `None`
@@ -987,7 +987,7 @@ macro_rules! uint_impl {
// SAFETY: the caller must uphold the safety contract for
// `unchecked_shr`.
// Any legal shift amount is losslessly representable in the self type.
- unsafe { intrinsics::unchecked_shr(self, rhs.try_into().ok().unwrap_unchecked()) }
+ unsafe { intrinsics::unchecked_shr(self, conv_rhs_for_unchecked_shift!($SelfT, rhs)) }
}
/// Checked exponentiation. Computes `self.pow(exp)`, returning `None` if
diff --git a/library/core/src/num/wrapping.rs b/library/core/src/num/wrapping.rs
index 5353d900e..ed354a2e5 100644
--- a/library/core/src/num/wrapping.rs
+++ b/library/core/src/num/wrapping.rs
@@ -87,8 +87,7 @@ impl<T: fmt::UpperHex> fmt::UpperHex for Wrapping<T> {
macro_rules! sh_impl_signed {
($t:ident, $f:ident) => {
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Shl<$f> for Wrapping<$t> {
+ impl Shl<$f> for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -100,22 +99,20 @@ macro_rules! sh_impl_signed {
}
}
}
- forward_ref_binop! { impl const Shl, shl for Wrapping<$t>, $f,
+ forward_ref_binop! { impl Shl, shl for Wrapping<$t>, $f,
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const ShlAssign<$f> for Wrapping<$t> {
+ impl ShlAssign<$f> for Wrapping<$t> {
#[inline]
fn shl_assign(&mut self, other: $f) {
*self = *self << other;
}
}
- forward_ref_op_assign! { impl const ShlAssign, shl_assign for Wrapping<$t>, $f }
+ forward_ref_op_assign! { impl ShlAssign, shl_assign for Wrapping<$t>, $f }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Shr<$f> for Wrapping<$t> {
+ impl Shr<$f> for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -127,26 +124,24 @@ macro_rules! sh_impl_signed {
}
}
}
- forward_ref_binop! { impl const Shr, shr for Wrapping<$t>, $f,
+ forward_ref_binop! { impl Shr, shr for Wrapping<$t>, $f,
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const ShrAssign<$f> for Wrapping<$t> {
+ impl ShrAssign<$f> for Wrapping<$t> {
#[inline]
fn shr_assign(&mut self, other: $f) {
*self = *self >> other;
}
}
- forward_ref_op_assign! { impl const ShrAssign, shr_assign for Wrapping<$t>, $f }
+ forward_ref_op_assign! { impl ShrAssign, shr_assign for Wrapping<$t>, $f }
};
}
macro_rules! sh_impl_unsigned {
($t:ident, $f:ident) => {
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Shl<$f> for Wrapping<$t> {
+ impl Shl<$f> for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -154,22 +149,20 @@ macro_rules! sh_impl_unsigned {
Wrapping(self.0.wrapping_shl((other & self::shift_max::$t as $f) as u32))
}
}
- forward_ref_binop! { impl const Shl, shl for Wrapping<$t>, $f,
+ forward_ref_binop! { impl Shl, shl for Wrapping<$t>, $f,
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const ShlAssign<$f> for Wrapping<$t> {
+ impl ShlAssign<$f> for Wrapping<$t> {
#[inline]
fn shl_assign(&mut self, other: $f) {
*self = *self << other;
}
}
- forward_ref_op_assign! { impl const ShlAssign, shl_assign for Wrapping<$t>, $f }
+ forward_ref_op_assign! { impl ShlAssign, shl_assign for Wrapping<$t>, $f }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Shr<$f> for Wrapping<$t> {
+ impl Shr<$f> for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -177,18 +170,17 @@ macro_rules! sh_impl_unsigned {
Wrapping(self.0.wrapping_shr((other & self::shift_max::$t as $f) as u32))
}
}
- forward_ref_binop! { impl const Shr, shr for Wrapping<$t>, $f,
+ forward_ref_binop! { impl Shr, shr for Wrapping<$t>, $f,
#[stable(feature = "wrapping_ref_ops", since = "1.39.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const ShrAssign<$f> for Wrapping<$t> {
+ impl ShrAssign<$f> for Wrapping<$t> {
#[inline]
fn shr_assign(&mut self, other: $f) {
*self = *self >> other;
}
}
- forward_ref_op_assign! { impl const ShrAssign, shr_assign for Wrapping<$t>, $f }
+ forward_ref_op_assign! { impl ShrAssign, shr_assign for Wrapping<$t>, $f }
};
}
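The `other & self::shift_max::$t` masking in these `Wrapping` shift impls means the shift amount wraps modulo the bit width instead of panicking; a small illustration of that behavior as I read the code (explicit `usize` amounts, since that is the RHS type `sh_impl_all!` currently instantiates):

```rust
use std::num::Wrapping;

fn main() {
    // shift_max for u8 is 7, so a shift by 9 is masked to 9 & 7 == 1.
    assert_eq!(Wrapping(1u8) << 9_usize, Wrapping(2u8));
    // Bits shifted out at the top are simply dropped.
    assert_eq!(Wrapping(0x80u8) << 1_usize, Wrapping(0u8));
    // The same masking applies on the right-shift side.
    assert_eq!(Wrapping(1u8) >> 9_usize, Wrapping(0u8));
}
```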
@@ -217,8 +209,7 @@ sh_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
macro_rules! wrapping_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Add for Wrapping<$t> {
+ impl Add for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -226,32 +217,29 @@ macro_rules! wrapping_impl {
Wrapping(self.0.wrapping_add(other.0))
}
}
- forward_ref_binop! { impl const Add, add for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl Add, add for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const AddAssign for Wrapping<$t> {
+ impl AddAssign for Wrapping<$t> {
#[inline]
fn add_assign(&mut self, other: Wrapping<$t>) {
*self = *self + other;
}
}
- forward_ref_op_assign! { impl const AddAssign, add_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl AddAssign, add_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const AddAssign<$t> for Wrapping<$t> {
+ impl AddAssign<$t> for Wrapping<$t> {
#[inline]
fn add_assign(&mut self, other: $t) {
*self = *self + Wrapping(other);
}
}
- forward_ref_op_assign! { impl const AddAssign, add_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl AddAssign, add_assign for Wrapping<$t>, $t }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Sub for Wrapping<$t> {
+ impl Sub for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -259,32 +247,29 @@ macro_rules! wrapping_impl {
Wrapping(self.0.wrapping_sub(other.0))
}
}
- forward_ref_binop! { impl const Sub, sub for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl Sub, sub for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const SubAssign for Wrapping<$t> {
+ impl SubAssign for Wrapping<$t> {
#[inline]
fn sub_assign(&mut self, other: Wrapping<$t>) {
*self = *self - other;
}
}
- forward_ref_op_assign! { impl const SubAssign, sub_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl SubAssign, sub_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const SubAssign<$t> for Wrapping<$t> {
+ impl SubAssign<$t> for Wrapping<$t> {
#[inline]
fn sub_assign(&mut self, other: $t) {
*self = *self - Wrapping(other);
}
}
- forward_ref_op_assign! { impl const SubAssign, sub_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl SubAssign, sub_assign for Wrapping<$t>, $t }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Mul for Wrapping<$t> {
+ impl Mul for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -296,28 +281,25 @@ macro_rules! wrapping_impl {
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const MulAssign for Wrapping<$t> {
+ impl MulAssign for Wrapping<$t> {
#[inline]
fn mul_assign(&mut self, other: Wrapping<$t>) {
*self = *self * other;
}
}
- forward_ref_op_assign! { impl const MulAssign, mul_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl MulAssign, mul_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const MulAssign<$t> for Wrapping<$t> {
+ impl MulAssign<$t> for Wrapping<$t> {
#[inline]
fn mul_assign(&mut self, other: $t) {
*self = *self * Wrapping(other);
}
}
- forward_ref_op_assign! { impl const MulAssign, mul_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl MulAssign, mul_assign for Wrapping<$t>, $t }
#[stable(feature = "wrapping_div", since = "1.3.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Div for Wrapping<$t> {
+ impl Div for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -325,32 +307,29 @@ macro_rules! wrapping_impl {
Wrapping(self.0.wrapping_div(other.0))
}
}
- forward_ref_binop! { impl const Div, div for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl Div, div for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const DivAssign for Wrapping<$t> {
+ impl DivAssign for Wrapping<$t> {
#[inline]
fn div_assign(&mut self, other: Wrapping<$t>) {
*self = *self / other;
}
}
- forward_ref_op_assign! { impl const DivAssign, div_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl DivAssign, div_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const DivAssign<$t> for Wrapping<$t> {
+ impl DivAssign<$t> for Wrapping<$t> {
#[inline]
fn div_assign(&mut self, other: $t) {
*self = *self / Wrapping(other);
}
}
- forward_ref_op_assign! { impl const DivAssign, div_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl DivAssign, div_assign for Wrapping<$t>, $t }
#[stable(feature = "wrapping_impls", since = "1.7.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Rem for Wrapping<$t> {
+ impl Rem for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -358,32 +337,29 @@ macro_rules! wrapping_impl {
Wrapping(self.0.wrapping_rem(other.0))
}
}
- forward_ref_binop! { impl const Rem, rem for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl Rem, rem for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const RemAssign for Wrapping<$t> {
+ impl RemAssign for Wrapping<$t> {
#[inline]
fn rem_assign(&mut self, other: Wrapping<$t>) {
*self = *self % other;
}
}
- forward_ref_op_assign! { impl const RemAssign, rem_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl RemAssign, rem_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const RemAssign<$t> for Wrapping<$t> {
+ impl RemAssign<$t> for Wrapping<$t> {
#[inline]
fn rem_assign(&mut self, other: $t) {
*self = *self % Wrapping(other);
}
}
- forward_ref_op_assign! { impl const RemAssign, rem_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl RemAssign, rem_assign for Wrapping<$t>, $t }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Not for Wrapping<$t> {
+ impl Not for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -391,12 +367,11 @@ macro_rules! wrapping_impl {
Wrapping(!self.0)
}
}
- forward_ref_unop! { impl const Not, not for Wrapping<$t>,
+ forward_ref_unop! { impl Not, not for Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitXor for Wrapping<$t> {
+ impl BitXor for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -404,32 +379,29 @@ macro_rules! wrapping_impl {
Wrapping(self.0 ^ other.0)
}
}
- forward_ref_binop! { impl const BitXor, bitxor for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl BitXor, bitxor for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitXorAssign for Wrapping<$t> {
+ impl BitXorAssign for Wrapping<$t> {
#[inline]
fn bitxor_assign(&mut self, other: Wrapping<$t>) {
*self = *self ^ other;
}
}
- forward_ref_op_assign! { impl const BitXorAssign, bitxor_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl BitXorAssign, bitxor_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitXorAssign<$t> for Wrapping<$t> {
+ impl BitXorAssign<$t> for Wrapping<$t> {
#[inline]
fn bitxor_assign(&mut self, other: $t) {
*self = *self ^ Wrapping(other);
}
}
- forward_ref_op_assign! { impl const BitXorAssign, bitxor_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl BitXorAssign, bitxor_assign for Wrapping<$t>, $t }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOr for Wrapping<$t> {
+ impl BitOr for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -437,32 +409,29 @@ macro_rules! wrapping_impl {
Wrapping(self.0 | other.0)
}
}
- forward_ref_binop! { impl const BitOr, bitor for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl BitOr, bitor for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOrAssign for Wrapping<$t> {
+ impl BitOrAssign for Wrapping<$t> {
#[inline]
fn bitor_assign(&mut self, other: Wrapping<$t>) {
*self = *self | other;
}
}
- forward_ref_op_assign! { impl const BitOrAssign, bitor_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl BitOrAssign, bitor_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOrAssign<$t> for Wrapping<$t> {
+ impl BitOrAssign<$t> for Wrapping<$t> {
#[inline]
fn bitor_assign(&mut self, other: $t) {
*self = *self | Wrapping(other);
}
}
- forward_ref_op_assign! { impl const BitOrAssign, bitor_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl BitOrAssign, bitor_assign for Wrapping<$t>, $t }
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitAnd for Wrapping<$t> {
+ impl BitAnd for Wrapping<$t> {
type Output = Wrapping<$t>;
#[inline]
@@ -470,39 +439,36 @@ macro_rules! wrapping_impl {
Wrapping(self.0 & other.0)
}
}
- forward_ref_binop! { impl const BitAnd, bitand for Wrapping<$t>, Wrapping<$t>,
+ forward_ref_binop! { impl BitAnd, bitand for Wrapping<$t>, Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitAndAssign for Wrapping<$t> {
+ impl BitAndAssign for Wrapping<$t> {
#[inline]
fn bitand_assign(&mut self, other: Wrapping<$t>) {
*self = *self & other;
}
}
- forward_ref_op_assign! { impl const BitAndAssign, bitand_assign for Wrapping<$t>, Wrapping<$t> }
+ forward_ref_op_assign! { impl BitAndAssign, bitand_assign for Wrapping<$t>, Wrapping<$t> }
#[stable(feature = "wrapping_int_assign_impl", since = "1.60.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitAndAssign<$t> for Wrapping<$t> {
+ impl BitAndAssign<$t> for Wrapping<$t> {
#[inline]
fn bitand_assign(&mut self, other: $t) {
*self = *self & Wrapping(other);
}
}
- forward_ref_op_assign! { impl const BitAndAssign, bitand_assign for Wrapping<$t>, $t }
+ forward_ref_op_assign! { impl BitAndAssign, bitand_assign for Wrapping<$t>, $t }
#[stable(feature = "wrapping_neg", since = "1.10.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Neg for Wrapping<$t> {
+ impl Neg for Wrapping<$t> {
type Output = Self;
#[inline]
fn neg(self) -> Self {
Wrapping(0) - self
}
}
- forward_ref_unop! { impl const Neg, neg for Wrapping<$t>,
+ forward_ref_unop! { impl Neg, neg for Wrapping<$t>,
#[stable(feature = "wrapping_ref", since = "1.14.0")] }
)*)
diff --git a/library/core/src/ops/arith.rs b/library/core/src/ops/arith.rs
index 0c7ee9630..840c8cd2f 100644
--- a/library/core/src/ops/arith.rs
+++ b/library/core/src/ops/arith.rs
@@ -73,7 +73,6 @@
append_const_msg
)]
#[doc(alias = "+")]
-#[const_trait]
pub trait Add<Rhs = Self> {
/// The resulting type after applying the `+` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -95,8 +94,7 @@ pub trait Add<Rhs = Self> {
macro_rules! add_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Add for $t {
+ impl Add for $t {
type Output = $t;
#[inline]
@@ -104,7 +102,7 @@ macro_rules! add_impl {
fn add(self, other: $t) -> $t { self + other }
}
- forward_ref_binop! { impl const Add, add for $t, $t }
+ forward_ref_binop! { impl Add, add for $t, $t }
)*)
}
@@ -183,7 +181,6 @@ add_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
append_const_msg
)]
#[doc(alias = "-")]
-#[const_trait]
pub trait Sub<Rhs = Self> {
/// The resulting type after applying the `-` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -205,8 +202,7 @@ pub trait Sub<Rhs = Self> {
macro_rules! sub_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Sub for $t {
+ impl Sub for $t {
type Output = $t;
#[inline]
@@ -214,7 +210,7 @@ macro_rules! sub_impl {
fn sub(self, other: $t) -> $t { self - other }
}
- forward_ref_binop! { impl const Sub, sub for $t, $t }
+ forward_ref_binop! { impl Sub, sub for $t, $t }
)*)
}
@@ -314,7 +310,6 @@ sub_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
label = "no implementation for `{Self} * {Rhs}`"
)]
#[doc(alias = "*")]
-#[const_trait]
pub trait Mul<Rhs = Self> {
/// The resulting type after applying the `*` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -336,8 +331,7 @@ pub trait Mul<Rhs = Self> {
macro_rules! mul_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Mul for $t {
+ impl Mul for $t {
type Output = $t;
#[inline]
@@ -345,7 +339,7 @@ macro_rules! mul_impl {
fn mul(self, other: $t) -> $t { self * other }
}
- forward_ref_binop! { impl const Mul, mul for $t, $t }
+ forward_ref_binop! { impl Mul, mul for $t, $t }
)*)
}
@@ -449,7 +443,6 @@ mul_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
label = "no implementation for `{Self} / {Rhs}`"
)]
#[doc(alias = "/")]
-#[const_trait]
pub trait Div<Rhs = Self> {
/// The resulting type after applying the `/` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -477,15 +470,14 @@ macro_rules! div_impl_integer {
///
#[doc = $panic]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Div for $t {
+ impl Div for $t {
type Output = $t;
#[inline]
fn div(self, other: $t) -> $t { self / other }
}
- forward_ref_binop! { impl const Div, div for $t, $t }
+ forward_ref_binop! { impl Div, div for $t, $t }
)*)*)
}
@@ -497,15 +489,14 @@ div_impl_integer! {
macro_rules! div_impl_float {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Div for $t {
+ impl Div for $t {
type Output = $t;
#[inline]
fn div(self, other: $t) -> $t { self / other }
}
- forward_ref_binop! { impl const Div, div for $t, $t }
+ forward_ref_binop! { impl Div, div for $t, $t }
)*)
}
@@ -526,7 +517,7 @@ div_impl_float! { f32 f64 }
/// use std::ops::Rem;
///
/// #[derive(PartialEq, Debug)]
-/// struct SplitSlice<'a, T: 'a> {
+/// struct SplitSlice<'a, T> {
/// slice: &'a [T],
/// }
///
@@ -553,7 +544,6 @@ div_impl_float! { f32 f64 }
label = "no implementation for `{Self} % {Rhs}`"
)]
#[doc(alias = "%")]
-#[const_trait]
pub trait Rem<Rhs = Self> {
/// The resulting type after applying the `%` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -581,15 +571,14 @@ macro_rules! rem_impl_integer {
///
#[doc = $panic]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Rem for $t {
+ impl Rem for $t {
type Output = $t;
#[inline]
fn rem(self, other: $t) -> $t { self % other }
}
- forward_ref_binop! { impl const Rem, rem for $t, $t }
+ forward_ref_binop! { impl Rem, rem for $t, $t }
)*)*)
}
@@ -616,15 +605,14 @@ macro_rules! rem_impl_float {
/// assert_eq!(x % y, remainder);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Rem for $t {
+ impl Rem for $t {
type Output = $t;
#[inline]
fn rem(self, other: $t) -> $t { self % other }
}
- forward_ref_binop! { impl const Rem, rem for $t, $t }
+ forward_ref_binop! { impl Rem, rem for $t, $t }
)*)
}
@@ -669,7 +657,6 @@ rem_impl_float! { f32 f64 }
#[lang = "neg"]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(alias = "-")]
-#[const_trait]
pub trait Neg {
/// The resulting type after applying the `-` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -692,8 +679,7 @@ pub trait Neg {
macro_rules! neg_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Neg for $t {
+ impl Neg for $t {
type Output = $t;
#[inline]
@@ -701,7 +687,7 @@ macro_rules! neg_impl {
fn neg(self) -> $t { -self }
}
- forward_ref_unop! { impl const Neg, neg for $t }
+ forward_ref_unop! { impl Neg, neg for $t }
)*)
}
@@ -744,7 +730,6 @@ neg_impl! { isize i8 i16 i32 i64 i128 f32 f64 }
)]
#[doc(alias = "+")]
#[doc(alias = "+=")]
-#[const_trait]
pub trait AddAssign<Rhs = Self> {
/// Performs the `+=` operation.
///
@@ -762,14 +747,13 @@ pub trait AddAssign<Rhs = Self> {
macro_rules! add_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const AddAssign for $t {
+ impl AddAssign for $t {
#[inline]
#[rustc_inherit_overflow_checks]
fn add_assign(&mut self, other: $t) { *self += other }
}
- forward_ref_op_assign! { impl const AddAssign, add_assign for $t, $t }
+ forward_ref_op_assign! { impl AddAssign, add_assign for $t, $t }
)+)
}
@@ -812,7 +796,6 @@ add_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
)]
#[doc(alias = "-")]
#[doc(alias = "-=")]
-#[const_trait]
pub trait SubAssign<Rhs = Self> {
/// Performs the `-=` operation.
///
@@ -830,14 +813,13 @@ pub trait SubAssign<Rhs = Self> {
macro_rules! sub_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const SubAssign for $t {
+ impl SubAssign for $t {
#[inline]
#[rustc_inherit_overflow_checks]
fn sub_assign(&mut self, other: $t) { *self -= other }
}
- forward_ref_op_assign! { impl const SubAssign, sub_assign for $t, $t }
+ forward_ref_op_assign! { impl SubAssign, sub_assign for $t, $t }
)+)
}
@@ -871,7 +853,6 @@ sub_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
)]
#[doc(alias = "*")]
#[doc(alias = "*=")]
-#[const_trait]
pub trait MulAssign<Rhs = Self> {
/// Performs the `*=` operation.
///
@@ -889,14 +870,13 @@ pub trait MulAssign<Rhs = Self> {
macro_rules! mul_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const MulAssign for $t {
+ impl MulAssign for $t {
#[inline]
#[rustc_inherit_overflow_checks]
fn mul_assign(&mut self, other: $t) { *self *= other }
}
- forward_ref_op_assign! { impl const MulAssign, mul_assign for $t, $t }
+ forward_ref_op_assign! { impl MulAssign, mul_assign for $t, $t }
)+)
}
@@ -930,7 +910,6 @@ mul_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
)]
#[doc(alias = "/")]
#[doc(alias = "/=")]
-#[const_trait]
pub trait DivAssign<Rhs = Self> {
/// Performs the `/=` operation.
///
@@ -948,13 +927,12 @@ pub trait DivAssign<Rhs = Self> {
macro_rules! div_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const DivAssign for $t {
+ impl DivAssign for $t {
#[inline]
fn div_assign(&mut self, other: $t) { *self /= other }
}
- forward_ref_op_assign! { impl const DivAssign, div_assign for $t, $t }
+ forward_ref_op_assign! { impl DivAssign, div_assign for $t, $t }
)+)
}
@@ -992,7 +970,6 @@ div_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 }
)]
#[doc(alias = "%")]
#[doc(alias = "%=")]
-#[const_trait]
pub trait RemAssign<Rhs = Self> {
/// Performs the `%=` operation.
///
@@ -1010,13 +987,12 @@ pub trait RemAssign<Rhs = Self> {
macro_rules! rem_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const RemAssign for $t {
+ impl RemAssign for $t {
#[inline]
fn rem_assign(&mut self, other: $t) { *self %= other }
}
- forward_ref_op_assign! { impl const RemAssign, rem_assign for $t, $t }
+ forward_ref_op_assign! { impl RemAssign, rem_assign for $t, $t }
)+)
}
diff --git a/library/core/src/ops/bit.rs b/library/core/src/ops/bit.rs
index 327009801..c70f4a3da 100644
--- a/library/core/src/ops/bit.rs
+++ b/library/core/src/ops/bit.rs
@@ -31,7 +31,6 @@
#[lang = "not"]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(alias = "!")]
-#[const_trait]
pub trait Not {
/// The resulting type after applying the `!` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -55,23 +54,21 @@ pub trait Not {
macro_rules! not_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Not for $t {
+ impl Not for $t {
type Output = $t;
#[inline]
fn not(self) -> $t { !self }
}
- forward_ref_unop! { impl const Not, not for $t }
+ forward_ref_unop! { impl Not, not for $t }
)*)
}
not_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
#[stable(feature = "not_never", since = "1.60.0")]
-#[rustc_const_unstable(feature = "const_ops", issue = "90080")]
-impl const Not for ! {
+impl Not for ! {
type Output = !;
#[inline]
@@ -144,7 +141,6 @@ impl const Not for ! {
message = "no implementation for `{Self} & {Rhs}`",
label = "no implementation for `{Self} & {Rhs}`"
)]
-#[const_trait]
pub trait BitAnd<Rhs = Self> {
/// The resulting type after applying the `&` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -168,15 +164,14 @@ pub trait BitAnd<Rhs = Self> {
macro_rules! bitand_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitAnd for $t {
+ impl BitAnd for $t {
type Output = $t;
#[inline]
fn bitand(self, rhs: $t) -> $t { self & rhs }
}
- forward_ref_binop! { impl const BitAnd, bitand for $t, $t }
+ forward_ref_binop! { impl BitAnd, bitand for $t, $t }
)*)
}
@@ -246,7 +241,6 @@ bitand_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
message = "no implementation for `{Self} | {Rhs}`",
label = "no implementation for `{Self} | {Rhs}`"
)]
-#[const_trait]
pub trait BitOr<Rhs = Self> {
/// The resulting type after applying the `|` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -270,15 +264,14 @@ pub trait BitOr<Rhs = Self> {
macro_rules! bitor_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOr for $t {
+ impl BitOr for $t {
type Output = $t;
#[inline]
fn bitor(self, rhs: $t) -> $t { self | rhs }
}
- forward_ref_binop! { impl const BitOr, bitor for $t, $t }
+ forward_ref_binop! { impl BitOr, bitor for $t, $t }
)*)
}
@@ -348,7 +341,6 @@ bitor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
message = "no implementation for `{Self} ^ {Rhs}`",
label = "no implementation for `{Self} ^ {Rhs}`"
)]
-#[const_trait]
pub trait BitXor<Rhs = Self> {
/// The resulting type after applying the `^` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -372,15 +364,14 @@ pub trait BitXor<Rhs = Self> {
macro_rules! bitxor_impl {
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitXor for $t {
+ impl BitXor for $t {
type Output = $t;
#[inline]
fn bitxor(self, other: $t) -> $t { self ^ other }
}
- forward_ref_binop! { impl const BitXor, bitxor for $t, $t }
+ forward_ref_binop! { impl BitXor, bitxor for $t, $t }
)*)
}
@@ -449,7 +440,6 @@ bitxor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
message = "no implementation for `{Self} << {Rhs}`",
label = "no implementation for `{Self} << {Rhs}`"
)]
-#[const_trait]
pub trait Shl<Rhs = Self> {
/// The resulting type after applying the `<<` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -471,8 +461,7 @@ pub trait Shl<Rhs = Self> {
macro_rules! shl_impl {
($t:ty, $f:ty) => {
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Shl<$f> for $t {
+ impl Shl<$f> for $t {
type Output = $t;
#[inline]
@@ -482,7 +471,7 @@ macro_rules! shl_impl {
}
}
- forward_ref_binop! { impl const Shl, shl for $t, $f }
+ forward_ref_binop! { impl Shl, shl for $t, $f }
};
}
@@ -569,7 +558,6 @@ shl_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 isize i128 }
message = "no implementation for `{Self} >> {Rhs}`",
label = "no implementation for `{Self} >> {Rhs}`"
)]
-#[const_trait]
pub trait Shr<Rhs = Self> {
/// The resulting type after applying the `>>` operator.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -591,8 +579,7 @@ pub trait Shr<Rhs = Self> {
macro_rules! shr_impl {
($t:ty, $f:ty) => {
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const Shr<$f> for $t {
+ impl Shr<$f> for $t {
type Output = $t;
#[inline]
@@ -602,7 +589,7 @@ macro_rules! shr_impl {
}
}
- forward_ref_binop! { impl const Shr, shr for $t, $f }
+ forward_ref_binop! { impl Shr, shr for $t, $f }
};
}
@@ -698,7 +685,6 @@ shr_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
message = "no implementation for `{Self} &= {Rhs}`",
label = "no implementation for `{Self} &= {Rhs}`"
)]
-#[const_trait]
pub trait BitAndAssign<Rhs = Self> {
/// Performs the `&=` operation.
///
@@ -728,13 +714,12 @@ pub trait BitAndAssign<Rhs = Self> {
macro_rules! bitand_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitAndAssign for $t {
+ impl BitAndAssign for $t {
#[inline]
fn bitand_assign(&mut self, other: $t) { *self &= other }
}
- forward_ref_op_assign! { impl const BitAndAssign, bitand_assign for $t, $t }
+ forward_ref_op_assign! { impl BitAndAssign, bitand_assign for $t, $t }
)+)
}
@@ -771,7 +756,6 @@ bitand_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
message = "no implementation for `{Self} |= {Rhs}`",
label = "no implementation for `{Self} |= {Rhs}`"
)]
-#[const_trait]
pub trait BitOrAssign<Rhs = Self> {
/// Performs the `|=` operation.
///
@@ -801,13 +785,12 @@ pub trait BitOrAssign<Rhs = Self> {
macro_rules! bitor_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitOrAssign for $t {
+ impl BitOrAssign for $t {
#[inline]
fn bitor_assign(&mut self, other: $t) { *self |= other }
}
- forward_ref_op_assign! { impl const BitOrAssign, bitor_assign for $t, $t }
+ forward_ref_op_assign! { impl BitOrAssign, bitor_assign for $t, $t }
)+)
}
@@ -844,7 +827,6 @@ bitor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
message = "no implementation for `{Self} ^= {Rhs}`",
label = "no implementation for `{Self} ^= {Rhs}`"
)]
-#[const_trait]
pub trait BitXorAssign<Rhs = Self> {
/// Performs the `^=` operation.
///
@@ -874,13 +856,12 @@ pub trait BitXorAssign<Rhs = Self> {
macro_rules! bitxor_assign_impl {
($($t:ty)+) => ($(
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const BitXorAssign for $t {
+ impl BitXorAssign for $t {
#[inline]
fn bitxor_assign(&mut self, other: $t) { *self ^= other }
}
- forward_ref_op_assign! { impl const BitXorAssign, bitxor_assign for $t, $t }
+ forward_ref_op_assign! { impl BitXorAssign, bitxor_assign for $t, $t }
)+)
}
@@ -915,7 +896,6 @@ bitxor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
message = "no implementation for `{Self} <<= {Rhs}`",
label = "no implementation for `{Self} <<= {Rhs}`"
)]
-#[const_trait]
pub trait ShlAssign<Rhs = Self> {
/// Performs the `<<=` operation.
///
@@ -937,8 +917,7 @@ pub trait ShlAssign<Rhs = Self> {
macro_rules! shl_assign_impl {
($t:ty, $f:ty) => {
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const ShlAssign<$f> for $t {
+ impl ShlAssign<$f> for $t {
#[inline]
#[rustc_inherit_overflow_checks]
fn shl_assign(&mut self, other: $f) {
@@ -946,7 +925,7 @@ macro_rules! shl_assign_impl {
}
}
- forward_ref_op_assign! { impl const ShlAssign, shl_assign for $t, $f }
+ forward_ref_op_assign! { impl ShlAssign, shl_assign for $t, $f }
};
}
@@ -999,7 +978,6 @@ shl_assign_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
message = "no implementation for `{Self} >>= {Rhs}`",
label = "no implementation for `{Self} >>= {Rhs}`"
)]
-#[const_trait]
pub trait ShrAssign<Rhs = Self> {
/// Performs the `>>=` operation.
///
@@ -1021,8 +999,7 @@ pub trait ShrAssign<Rhs = Self> {
macro_rules! shr_assign_impl {
($t:ty, $f:ty) => {
#[stable(feature = "op_assign_traits", since = "1.8.0")]
- #[rustc_const_unstable(feature = "const_ops", issue = "90080")]
- impl const ShrAssign<$f> for $t {
+ impl ShrAssign<$f> for $t {
#[inline]
#[rustc_inherit_overflow_checks]
fn shr_assign(&mut self, other: $f) {
@@ -1030,7 +1007,7 @@ macro_rules! shr_assign_impl {
}
}
- forward_ref_op_assign! { impl const ShrAssign, shr_assign for $t, $f }
+ forward_ref_op_assign! { impl ShrAssign, shr_assign for $t, $f }
};
}
diff --git a/library/core/src/ops/control_flow.rs b/library/core/src/ops/control_flow.rs
index 117706fb4..e10c438ef 100644
--- a/library/core/src/ops/control_flow.rs
+++ b/library/core/src/ops/control_flow.rs
@@ -97,8 +97,7 @@ pub enum ControlFlow<B, C = ()> {
}
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<B, C> const ops::Try for ControlFlow<B, C> {
+impl<B, C> ops::Try for ControlFlow<B, C> {
type Output = C;
type Residual = ControlFlow<B, convert::Infallible>;
@@ -117,8 +116,7 @@ impl<B, C> const ops::Try for ControlFlow<B, C> {
}
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<B, C> const ops::FromResidual for ControlFlow<B, C> {
+impl<B, C> ops::FromResidual for ControlFlow<B, C> {
#[inline]
fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {
match residual {
@@ -128,8 +126,7 @@ impl<B, C> const ops::FromResidual for ControlFlow<B, C> {
}
#[unstable(feature = "try_trait_v2_residual", issue = "91285")]
-#[rustc_const_unstable(feature = "const_try", issue = "74935")]
-impl<B, C> const ops::Residual<C> for ControlFlow<B, convert::Infallible> {
+impl<B, C> ops::Residual<C> for ControlFlow<B, convert::Infallible> {
type TryType = ControlFlow<B, C>;
}
diff --git a/library/core/src/ops/deref.rs b/library/core/src/ops/deref.rs
index c67867f44..08c35b6da 100644
--- a/library/core/src/ops/deref.rs
+++ b/library/core/src/ops/deref.rs
@@ -61,7 +61,6 @@
#[doc(alias = "&*")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Deref"]
-#[const_trait]
pub trait Deref {
/// The resulting type after dereferencing.
#[stable(feature = "rust1", since = "1.0.0")]
@@ -77,8 +76,7 @@ pub trait Deref {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_deref", issue = "88955")]
-impl<T: ?Sized> const Deref for &T {
+impl<T: ?Sized> Deref for &T {
type Target = T;
#[rustc_diagnostic_item = "noop_method_deref"]
@@ -91,8 +89,7 @@ impl<T: ?Sized> const Deref for &T {
impl<T: ?Sized> !DerefMut for &T {}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_deref", issue = "88955")]
-impl<T: ?Sized> const Deref for &mut T {
+impl<T: ?Sized> Deref for &mut T {
type Target = T;
fn deref(&self) -> &T {
@@ -170,7 +167,6 @@ impl<T: ?Sized> const Deref for &mut T {
#[lang = "deref_mut"]
#[doc(alias = "*")]
#[stable(feature = "rust1", since = "1.0.0")]
-#[const_trait]
pub trait DerefMut: Deref {
/// Mutably dereferences the value.
#[stable(feature = "rust1", since = "1.0.0")]
diff --git a/library/core/src/ops/drop.rs b/library/core/src/ops/drop.rs
index a2c3d978c..9ebf426be 100644
--- a/library/core/src/ops/drop.rs
+++ b/library/core/src/ops/drop.rs
@@ -132,6 +132,74 @@
/// are `Copy` get implicitly duplicated by the compiler, making it very
/// hard to predict when, and how often destructors will be executed. As such,
/// these types cannot have destructors.
+///
+/// ## Drop check
+///
+/// Dropping interacts with the borrow checker in subtle ways: when a type `T` is being implicitly
+/// dropped as some variable of this type goes out of scope, the borrow checker needs to ensure that
+/// calling `T`'s destructor at this moment is safe. In particular, it also needs to be safe to
+/// recursively drop all the fields of `T`. For example, it is crucial that code like the following
+/// is rejected:
+///
+/// ```compile_fail,E0597
+/// use std::cell::Cell;
+///
+/// struct S<'a>(Cell<Option<&'a S<'a>>>, Box<i32>);
+/// impl Drop for S<'_> {
+/// fn drop(&mut self) {
+/// if let Some(r) = self.0.get() {
+/// // Print the contents of the `Box` in `r`.
+/// println!("{}", r.1);
+/// }
+/// }
+/// }
+///
+/// fn main() {
+/// // Set up two `S` that point to each other.
+/// let s1 = S(Cell::new(None), Box::new(42));
+/// let s2 = S(Cell::new(Some(&s1)), Box::new(42));
+/// s1.0.set(Some(&s2));
+/// // Now they both get dropped. But whichever is the 2nd one
+/// // to be dropped will access the `Box` in the first one,
+/// // which is a use-after-free!
+/// }
+/// ```
+///
+/// The Nomicon discusses the need for [drop check in more detail][drop check].
+///
+/// To reject such code, the "drop check" analysis determines which types and lifetimes need to
+/// still be live when `T` gets dropped. The exact details of this analysis are not yet
+/// stably guaranteed and **subject to change**. Currently, the analysis works as follows:
+/// - If `T` has no drop glue, then trivially nothing is required to be live. This is the case if
+/// neither `T` nor any of its (recursive) fields have a destructor (`impl Drop`). [`PhantomData`]
+/// and [`ManuallyDrop`] are considered to never have a destructor, no matter their field type.
+/// - If `T` has drop glue, then, for all types `U` that are *owned* by any field of `T`,
+/// recursively add the types and lifetimes that need to be live when `U` gets dropped. The set of
+/// owned types is determined by recursively traversing `T`:
+/// - Recursively descend through `PhantomData`, `Box`, tuples, and arrays (including arrays of
+/// length 0).
+/// - Stop at reference and raw pointer types as well as function pointers and function items;
+/// they do not own anything.
+/// - Stop at non-composite types (type parameters that remain generic in the current context and
+/// base types such as integers and `bool`); these types are owned.
+/// - When hitting an ADT with `impl Drop`, stop there; this type is owned.
+/// - When hitting an ADT without `impl Drop`, recursively descend to its fields. (For an `enum`,
+/// consider all fields of all variants.)
+/// - Furthermore, if `T` implements `Drop`, then all generic (lifetime and type) parameters of `T`
+/// must be live.
+///
+/// In the above example, the last clause implies that `'a` must be live when `S<'a>` is dropped,
+/// and hence the example is rejected. If we remove the `impl Drop`, the liveness requirement
+/// disappears and the example is accepted.
+///
+/// There exists an unstable way for a type to opt out of the last clause; this is called "drop
+/// check eyepatch" or `may_dangle`. For more details on this nightly-only feature, see the
+/// [discussion in the Nomicon][nomicon].
+///
+/// [`ManuallyDrop`]: crate::mem::ManuallyDrop
+/// [`PhantomData`]: crate::marker::PhantomData
+/// [drop check]: ../../nomicon/dropck.html
+/// [nomicon]: ../../nomicon/phantom-data.html#an-exception-the-special-case-of-the-standard-library-and-its-unstable-may_dangle
#[lang = "drop"]
#[stable(feature = "rust1", since = "1.0.0")]
#[const_trait]
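Counterpart to the `compile_fail` example in the new Drop docs, showing the last clause from the other direction: with the `impl Drop` removed there is no drop glue that could observe `'a`, so the same cyclic setup is accepted by the drop checker (a sketch of my own, not taken from the patch):

```rust
use std::cell::Cell;

#[allow(dead_code)]
struct S<'a>(Cell<Option<&'a S<'a>>>, Box<i32>);

fn main() {
    // The same cyclic setup as in the rejected example, but without `impl Drop`.
    let s1 = S(Cell::new(None), Box::new(42));
    let s2 = S(Cell::new(Some(&s1)), Box::new(42));
    s1.0.set(Some(&s2));
    // Both values are dropped here; with no user-written destructor, nothing
    // can read through the references that are about to dangle.
}
```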
diff --git a/library/core/src/ops/function.rs b/library/core/src/ops/function.rs
index b7e1aee9d..67c8245f0 100644
--- a/library/core/src/ops/function.rs
+++ b/library/core/src/ops/function.rs
@@ -254,10 +254,9 @@ mod impls {
use crate::marker::Tuple;
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
- impl<A: Tuple, F: ?Sized> const Fn<A> for &F
+ impl<A: Tuple, F: ?Sized> Fn<A> for &F
where
- F: ~const Fn<A>,
+ F: Fn<A>,
{
extern "rust-call" fn call(&self, args: A) -> F::Output {
(**self).call(args)
@@ -265,10 +264,9 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
- impl<A: Tuple, F: ?Sized> const FnMut<A> for &F
+ impl<A: Tuple, F: ?Sized> FnMut<A> for &F
where
- F: ~const Fn<A>,
+ F: Fn<A>,
{
extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output {
(**self).call(args)
@@ -276,10 +274,9 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
- impl<A: Tuple, F: ?Sized> const FnOnce<A> for &F
+ impl<A: Tuple, F: ?Sized> FnOnce<A> for &F
where
- F: ~const Fn<A>,
+ F: Fn<A>,
{
type Output = F::Output;
@@ -289,10 +286,9 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
- impl<A: Tuple, F: ?Sized> const FnMut<A> for &mut F
+ impl<A: Tuple, F: ?Sized> FnMut<A> for &mut F
where
- F: ~const FnMut<A>,
+ F: FnMut<A>,
{
extern "rust-call" fn call_mut(&mut self, args: A) -> F::Output {
(*self).call_mut(args)
@@ -300,10 +296,9 @@ mod impls {
}
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_fn_trait_ref_impls", issue = "101803")]
- impl<A: Tuple, F: ?Sized> const FnOnce<A> for &mut F
+ impl<A: Tuple, F: ?Sized> FnOnce<A> for &mut F
where
- F: ~const FnMut<A>,
+ F: FnMut<A>,
{
type Output = F::Output;
extern "rust-call" fn call_once(self, args: A) -> F::Output {
diff --git a/library/core/src/ops/index.rs b/library/core/src/ops/index.rs
index 228efb0bc..1f1784ec9 100644
--- a/library/core/src/ops/index.rs
+++ b/library/core/src/ops/index.rs
@@ -55,10 +55,10 @@
#[doc(alias = "]")]
#[doc(alias = "[")]
#[doc(alias = "[]")]
-#[const_trait]
pub trait Index<Idx: ?Sized> {
/// The returned type after indexing.
#[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_diagnostic_item = "IndexOutput"]
type Output: ?Sized;
/// Performs the indexing (`container[index]`) operation.
@@ -164,8 +164,7 @@ see chapter in The Book <https://doc.rust-lang.org/book/ch08-02-strings.html#ind
#[doc(alias = "[")]
#[doc(alias = "]")]
#[doc(alias = "[]")]
-#[const_trait]
-pub trait IndexMut<Idx: ?Sized>: ~const Index<Idx> {
+pub trait IndexMut<Idx: ?Sized>: Index<Idx> {
/// Performs the mutable indexing (`container[index]`) operation.
///
/// # Panics
diff --git a/library/core/src/ops/range.rs b/library/core/src/ops/range.rs
index b8ab26564..ba5e6ddc7 100644
--- a/library/core/src/ops/range.rs
+++ b/library/core/src/ops/range.rs
@@ -96,7 +96,7 @@ impl<Idx: fmt::Debug> fmt::Debug for Range<Idx> {
}
}
-impl<Idx: ~const PartialOrd<Idx>> Range<Idx> {
+impl<Idx: PartialOrd<Idx>> Range<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
@@ -116,11 +116,10 @@ impl<Idx: ~const PartialOrd<Idx>> Range<Idx> {
/// assert!(!(f32::NAN..1.0).contains(&0.5));
/// ```
#[stable(feature = "range_contains", since = "1.35.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
- pub const fn contains<U>(&self, item: &U) -> bool
+ pub fn contains<U>(&self, item: &U) -> bool
where
- Idx: ~const PartialOrd<U>,
- U: ?Sized + ~const PartialOrd<Idx>,
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
@@ -143,8 +142,7 @@ impl<Idx: ~const PartialOrd<Idx>> Range<Idx> {
/// assert!( (f32::NAN..5.0).is_empty());
/// ```
#[stable(feature = "range_is_empty", since = "1.47.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
- pub const fn is_empty(&self) -> bool {
+ pub fn is_empty(&self) -> bool {
!(self.start < self.end)
}
}
@@ -201,7 +199,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeFrom<Idx> {
}
}
-impl<Idx: ~const PartialOrd<Idx>> RangeFrom<Idx> {
+impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
@@ -216,11 +214,10 @@ impl<Idx: ~const PartialOrd<Idx>> RangeFrom<Idx> {
/// assert!(!(f32::NAN..).contains(&0.5));
/// ```
#[stable(feature = "range_contains", since = "1.35.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
- pub const fn contains<U>(&self, item: &U) -> bool
+ pub fn contains<U>(&self, item: &U) -> bool
where
- Idx: ~const PartialOrd<U>,
- U: ?Sized + ~const PartialOrd<Idx>,
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
@@ -283,7 +280,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeTo<Idx> {
}
}
-impl<Idx: ~const PartialOrd<Idx>> RangeTo<Idx> {
+impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
@@ -298,11 +295,10 @@ impl<Idx: ~const PartialOrd<Idx>> RangeTo<Idx> {
/// assert!(!(..f32::NAN).contains(&0.5));
/// ```
#[stable(feature = "range_contains", since = "1.35.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
- pub const fn contains<U>(&self, item: &U) -> bool
+ pub fn contains<U>(&self, item: &U) -> bool
where
- Idx: ~const PartialOrd<U>,
- U: ?Sized + ~const PartialOrd<Idx>,
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
@@ -474,7 +470,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> {
}
}
-impl<Idx: ~const PartialOrd<Idx>> RangeInclusive<Idx> {
+impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
@@ -505,11 +501,10 @@ impl<Idx: ~const PartialOrd<Idx>> RangeInclusive<Idx> {
/// assert!(!r.contains(&3) && !r.contains(&5));
/// ```
#[stable(feature = "range_contains", since = "1.35.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
- pub const fn contains<U>(&self, item: &U) -> bool
+ pub fn contains<U>(&self, item: &U) -> bool
where
- Idx: ~const PartialOrd<U>,
- U: ?Sized + ~const PartialOrd<Idx>,
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
@@ -541,9 +536,8 @@ impl<Idx: ~const PartialOrd<Idx>> RangeInclusive<Idx> {
/// assert!(r.is_empty());
/// ```
#[stable(feature = "range_is_empty", since = "1.47.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
#[inline]
- pub const fn is_empty(&self) -> bool {
+ pub fn is_empty(&self) -> bool {
self.exhausted || !(self.start <= self.end)
}
}
@@ -605,7 +599,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeToInclusive<Idx> {
}
}
-impl<Idx: ~const PartialOrd<Idx>> RangeToInclusive<Idx> {
+impl<Idx: PartialOrd<Idx>> RangeToInclusive<Idx> {
/// Returns `true` if `item` is contained in the range.
///
/// # Examples
@@ -620,11 +614,10 @@ impl<Idx: ~const PartialOrd<Idx>> RangeToInclusive<Idx> {
/// assert!(!(..=f32::NAN).contains(&0.5));
/// ```
#[stable(feature = "range_contains", since = "1.35.0")]
- #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
- pub const fn contains<U>(&self, item: &U) -> bool
+ pub fn contains<U>(&self, item: &U) -> bool
where
- Idx: ~const PartialOrd<U>,
- U: ?Sized + ~const PartialOrd<Idx>,
+ Idx: PartialOrd<U>,
+ U: ?Sized + PartialOrd<Idx>,
{
<Self as RangeBounds<Idx>>::contains(self, item)
}
@@ -765,7 +758,6 @@ impl<T: Clone> Bound<&T> {
/// `RangeBounds` is implemented by Rust's built-in range types, produced
/// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`.
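///
/// A function generic over this trait accepts any of those forms, as the following sketch shows
/// (`in_range` is just an illustrative helper, not part of this module):
///
/// ```
/// use std::ops::RangeBounds;
///
/// fn in_range(range: impl RangeBounds<i32>) -> bool {
///     // `contains` is a provided method built on `start_bound` and `end_bound`.
///     range.contains(&5)
/// }
///
/// assert!(in_range(0..10));
/// assert!(in_range(..));
/// assert!(!in_range(6..=7));
/// ```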
#[stable(feature = "collections_range", since = "1.28.0")]
-#[const_trait]
pub trait RangeBounds<T: ?Sized> {
/// Start index bound.
///
@@ -818,8 +810,8 @@ pub trait RangeBounds<T: ?Sized> {
#[stable(feature = "range_contains", since = "1.35.0")]
fn contains<U>(&self, item: &U) -> bool
where
- T: ~const PartialOrd<U>,
- U: ?Sized + ~const PartialOrd<T>,
+ T: PartialOrd<U>,
+ U: ?Sized + PartialOrd<T>,
{
(match self.start_bound() {
Included(start) => start <= item,
@@ -836,8 +828,7 @@ pub trait RangeBounds<T: ?Sized> {
use self::Bound::{Excluded, Included, Unbounded};
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T: ?Sized> const RangeBounds<T> for RangeFull {
+impl<T: ?Sized> RangeBounds<T> for RangeFull {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
@@ -847,8 +838,7 @@ impl<T: ?Sized> const RangeBounds<T> for RangeFull {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeFrom<T> {
+impl<T> RangeBounds<T> for RangeFrom<T> {
fn start_bound(&self) -> Bound<&T> {
Included(&self.start)
}
@@ -858,8 +848,7 @@ impl<T> const RangeBounds<T> for RangeFrom<T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeTo<T> {
+impl<T> RangeBounds<T> for RangeTo<T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
@@ -869,8 +858,7 @@ impl<T> const RangeBounds<T> for RangeTo<T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for Range<T> {
+impl<T> RangeBounds<T> for Range<T> {
fn start_bound(&self) -> Bound<&T> {
Included(&self.start)
}
@@ -880,8 +868,7 @@ impl<T> const RangeBounds<T> for Range<T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeInclusive<T> {
+impl<T> RangeBounds<T> for RangeInclusive<T> {
fn start_bound(&self) -> Bound<&T> {
Included(&self.start)
}
@@ -897,8 +884,7 @@ impl<T> const RangeBounds<T> for RangeInclusive<T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeToInclusive<T> {
+impl<T> RangeBounds<T> for RangeToInclusive<T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
@@ -908,8 +894,7 @@ impl<T> const RangeBounds<T> for RangeToInclusive<T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for (Bound<T>, Bound<T>) {
+impl<T> RangeBounds<T> for (Bound<T>, Bound<T>) {
fn start_bound(&self) -> Bound<&T> {
match *self {
(Included(ref start), _) => Included(start),
@@ -928,8 +913,7 @@ impl<T> const RangeBounds<T> for (Bound<T>, Bound<T>) {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<'a, T: ?Sized + 'a> const RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
+impl<'a, T: ?Sized + 'a> RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
fn start_bound(&self) -> Bound<&T> {
self.0
}
@@ -940,8 +924,7 @@ impl<'a, T: ?Sized + 'a> const RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeFrom<&T> {
+impl<T> RangeBounds<T> for RangeFrom<&T> {
fn start_bound(&self) -> Bound<&T> {
Included(self.start)
}
@@ -951,8 +934,7 @@ impl<T> const RangeBounds<T> for RangeFrom<&T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeTo<&T> {
+impl<T> RangeBounds<T> for RangeTo<&T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
@@ -962,8 +944,7 @@ impl<T> const RangeBounds<T> for RangeTo<&T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for Range<&T> {
+impl<T> RangeBounds<T> for Range<&T> {
fn start_bound(&self) -> Bound<&T> {
Included(self.start)
}
@@ -973,8 +954,7 @@ impl<T> const RangeBounds<T> for Range<&T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeInclusive<&T> {
+impl<T> RangeBounds<T> for RangeInclusive<&T> {
fn start_bound(&self) -> Bound<&T> {
Included(self.start)
}
@@ -984,8 +964,7 @@ impl<T> const RangeBounds<T> for RangeInclusive<&T> {
}
#[stable(feature = "collections_range", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
-impl<T> const RangeBounds<T> for RangeToInclusive<&T> {
+impl<T> RangeBounds<T> for RangeToInclusive<&T> {
fn start_bound(&self) -> Bound<&T> {
Unbounded
}
diff --git a/library/core/src/ops/try_trait.rs b/library/core/src/ops/try_trait.rs
index c254803fb..b4f69d0b2 100644
--- a/library/core/src/ops/try_trait.rs
+++ b/library/core/src/ops/try_trait.rs
@@ -128,8 +128,7 @@ use crate::ops::ControlFlow;
)]
#[doc(alias = "?")]
#[lang = "Try"]
-#[const_trait]
-pub trait Try: ~const FromResidual {
+pub trait Try: FromResidual {
/// The type of the value produced by `?` when *not* short-circuiting.
#[unstable(feature = "try_trait_v2", issue = "84277")]
type Output;
@@ -305,7 +304,6 @@ pub trait Try: ~const FromResidual {
)]
#[rustc_diagnostic_item = "FromResidual"]
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[const_trait]
pub trait FromResidual<R = <Self as Try>::Residual> {
/// Constructs the type from a compatible `Residual` type.
///
@@ -358,11 +356,10 @@ where
/// and in the other direction,
/// `<Result<Infallible, E> as Residual<T>>::TryType = Result<T, E>`.
#[unstable(feature = "try_trait_v2_residual", issue = "91285")]
-#[const_trait]
pub trait Residual<O> {
/// The "return" type of this meta-function.
#[unstable(feature = "try_trait_v2_residual", issue = "91285")]
- type TryType: ~const Try<Output = O, Residual = Self>;
+ type TryType: Try<Output = O, Residual = Self>;
}
#[unstable(feature = "pub_crate_should_not_need_unstable_attr", issue = "none")]
@@ -389,16 +386,14 @@ impl<T> NeverShortCircuit<T> {
}
#[inline]
- pub fn wrap_mut_2<A, B>(
- mut f: impl ~const FnMut(A, B) -> T,
- ) -> impl ~const FnMut(A, B) -> Self {
- const move |a, b| NeverShortCircuit(f(a, b))
+ pub fn wrap_mut_2<A, B>(mut f: impl FnMut(A, B) -> T) -> impl FnMut(A, B) -> Self {
+ move |a, b| NeverShortCircuit(f(a, b))
}
}
pub(crate) enum NeverShortCircuitResidual {}
-impl<T> const Try for NeverShortCircuit<T> {
+impl<T> Try for NeverShortCircuit<T> {
type Output = T;
type Residual = NeverShortCircuitResidual;
@@ -413,14 +408,14 @@ impl<T> const Try for NeverShortCircuit<T> {
}
}
-impl<T> const FromResidual for NeverShortCircuit<T> {
+impl<T> FromResidual for NeverShortCircuit<T> {
#[inline]
fn from_residual(never: NeverShortCircuitResidual) -> Self {
match never {}
}
}
-impl<T> const Residual<T> for NeverShortCircuitResidual {
+impl<T> Residual<T> for NeverShortCircuitResidual {
type TryType = NeverShortCircuit<T>;
}
diff --git a/library/core/src/option.rs b/library/core/src/option.rs
index 6f7bc6ed2..ec1ef3cf4 100644
--- a/library/core/src/option.rs
+++ b/library/core/src/option.rs
@@ -547,7 +547,6 @@
#![stable(feature = "rust1", since = "1.0.0")]
use crate::iter::{self, FromIterator, FusedIterator, TrustedLen};
-use crate::marker::Destruct;
use crate::panicking::{panic, panic_str};
use crate::pin::Pin;
use crate::{
@@ -559,7 +558,7 @@ use crate::{
/// The `Option` type. See [the module level documentation](self) for more.
#[derive(Copy, PartialOrd, Eq, Ord, Debug, Hash)]
#[rustc_diagnostic_item = "Option"]
-#[cfg_attr(not(bootstrap), lang = "Option")]
+#[lang = "Option"]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Option<T> {
/// No value.
@@ -766,13 +765,6 @@ impl<T> Option<T> {
#[must_use]
#[unstable(feature = "option_as_slice", issue = "108545")]
pub fn as_slice(&self) -> &[T] {
- #[cfg(bootstrap)]
- match self {
- Some(value) => slice::from_ref(value),
- None => &[],
- }
-
- #[cfg(not(bootstrap))]
// SAFETY: When the `Option` is `Some`, we're using the actual pointer
// to the payload, with a length of 1, so this is equivalent to
// `slice::from_ref`, and thus is safe.
@@ -833,13 +825,6 @@ impl<T> Option<T> {
#[must_use]
#[unstable(feature = "option_as_slice", issue = "108545")]
pub fn as_mut_slice(&mut self) -> &mut [T] {
- #[cfg(bootstrap)]
- match self {
- Some(value) => slice::from_mut(value),
- None => &mut [],
- }
-
- #[cfg(not(bootstrap))]
// SAFETY: When the `Option` is `Some`, we're using the actual pointer
// to the payload, with a length of 1, so this is equivalent to
// `slice::from_mut`, and thus is safe.
@@ -967,11 +952,7 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn unwrap_or(self, default: T) -> T
- where
- T: ~const Destruct,
- {
+ pub fn unwrap_or(self, default: T) -> T {
match self {
Some(x) => x,
None => default,
@@ -989,11 +970,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn unwrap_or_else<F>(self, f: F) -> T
+ pub fn unwrap_or_else<F>(self, f: F) -> T
where
- F: ~const FnOnce() -> T,
- F: ~const Destruct,
+ F: FnOnce() -> T,
{
match self {
Some(x) => x,
@@ -1022,14 +1001,13 @@ impl<T> Option<T> {
/// [`FromStr`]: crate::str::FromStr
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn unwrap_or_default(self) -> T
+ pub fn unwrap_or_default(self) -> T
where
- T: ~const Default,
+ T: Default,
{
match self {
Some(x) => x,
- None => Default::default(),
+ None => T::default(),
}
}
@@ -1089,11 +1067,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn map<U, F>(self, f: F) -> Option<U>
+ pub fn map<U, F>(self, f: F) -> Option<U>
where
- F: ~const FnOnce(T) -> U,
- F: ~const Destruct,
+ F: FnOnce(T) -> U,
{
match self {
Some(x) => Some(f(x)),
@@ -1118,11 +1094,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[unstable(feature = "result_option_inspect", issue = "91345")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn inspect<F>(self, f: F) -> Self
+ pub fn inspect<F>(self, f: F) -> Self
where
- F: ~const FnOnce(&T),
- F: ~const Destruct,
+ F: FnOnce(&T),
{
if let Some(ref x) = self {
f(x);
@@ -1151,12 +1125,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn map_or<U, F>(self, default: U, f: F) -> U
+ pub fn map_or<U, F>(self, default: U, f: F) -> U
where
- F: ~const FnOnce(T) -> U,
- F: ~const Destruct,
- U: ~const Destruct,
+ F: FnOnce(T) -> U,
{
match self {
Some(t) => f(t),
@@ -1180,13 +1151,10 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn map_or_else<U, D, F>(self, default: D, f: F) -> U
+ pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
where
- D: ~const FnOnce() -> U,
- D: ~const Destruct,
- F: ~const FnOnce(T) -> U,
- F: ~const Destruct,
+ D: FnOnce() -> U,
+ F: FnOnce(T) -> U,
{
match self {
Some(t) => f(t),
@@ -1217,11 +1185,7 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn ok_or<E>(self, err: E) -> Result<T, E>
- where
- E: ~const Destruct,
- {
+ pub fn ok_or<E>(self, err: E) -> Result<T, E> {
match self {
Some(v) => Ok(v),
None => Err(err),
@@ -1246,11 +1210,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn ok_or_else<E, F>(self, err: F) -> Result<T, E>
+ pub fn ok_or_else<E, F>(self, err: F) -> Result<T, E>
where
- F: ~const FnOnce() -> E,
- F: ~const Destruct,
+ F: FnOnce() -> E,
{
match self {
Some(v) => Ok(v),
@@ -1274,10 +1236,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "option_deref", since = "1.40.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn as_deref(&self) -> Option<&T::Target>
+ pub fn as_deref(&self) -> Option<&T::Target>
where
- T: ~const Deref,
+ T: Deref,
{
match self.as_ref() {
Some(t) => Some(t.deref()),
@@ -1301,10 +1262,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "option_deref", since = "1.40.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn as_deref_mut(&mut self) -> Option<&mut T::Target>
+ pub fn as_deref_mut(&mut self) -> Option<&mut T::Target>
where
- T: ~const DerefMut,
+ T: DerefMut,
{
match self.as_mut() {
Some(t) => Some(t.deref_mut()),
@@ -1388,12 +1348,7 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn and<U>(self, optb: Option<U>) -> Option<U>
- where
- T: ~const Destruct,
- U: ~const Destruct,
- {
+ pub fn and<U>(self, optb: Option<U>) -> Option<U> {
match self {
Some(_) => optb,
None => None,
@@ -1430,11 +1385,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn and_then<U, F>(self, f: F) -> Option<U>
+ pub fn and_then<U, F>(self, f: F) -> Option<U>
where
- F: ~const FnOnce(T) -> Option<U>,
- F: ~const Destruct,
+ F: FnOnce(T) -> Option<U>,
{
match self {
Some(x) => f(x),
@@ -1468,12 +1421,9 @@ impl<T> Option<T> {
/// [`Some(t)`]: Some
#[inline]
#[stable(feature = "option_filter", since = "1.27.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn filter<P>(self, predicate: P) -> Self
+ pub fn filter<P>(self, predicate: P) -> Self
where
- T: ~const Destruct,
- P: ~const FnOnce(&T) -> bool,
- P: ~const Destruct,
+ P: FnOnce(&T) -> bool,
{
if let Some(x) = self {
if predicate(&x) {
@@ -1512,11 +1462,7 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn or(self, optb: Option<T>) -> Option<T>
- where
- T: ~const Destruct,
- {
+ pub fn or(self, optb: Option<T>) -> Option<T> {
match self {
Some(x) => Some(x),
None => optb,
@@ -1538,11 +1484,9 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn or_else<F>(self, f: F) -> Option<T>
+ pub fn or_else<F>(self, f: F) -> Option<T>
where
- F: ~const FnOnce() -> Option<T>,
- F: ~const Destruct,
+ F: FnOnce() -> Option<T>,
{
match self {
Some(x) => Some(x),
@@ -1573,11 +1517,7 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "option_xor", since = "1.37.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn xor(self, optb: Option<T>) -> Option<T>
- where
- T: ~const Destruct,
- {
+ pub fn xor(self, optb: Option<T>) -> Option<T> {
match (self, optb) {
(Some(a), None) => Some(a),
(None, Some(b)) => Some(b),
@@ -1611,11 +1551,7 @@ impl<T> Option<T> {
#[must_use = "if you intended to set a value, consider assignment instead"]
#[inline]
#[stable(feature = "option_insert", since = "1.53.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn insert(&mut self, value: T) -> &mut T
- where
- T: ~const Destruct,
- {
+ pub fn insert(&mut self, value: T) -> &mut T {
*self = Some(value);
// SAFETY: the code above just filled the option
@@ -1644,11 +1580,7 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "option_entry", since = "1.20.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn get_or_insert(&mut self, value: T) -> &mut T
- where
- T: ~const Destruct,
- {
+ pub fn get_or_insert(&mut self, value: T) -> &mut T {
if let None = *self {
*self = Some(value);
}
@@ -1679,16 +1611,11 @@ impl<T> Option<T> {
/// ```
#[inline]
#[unstable(feature = "option_get_or_insert_default", issue = "82901")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn get_or_insert_default(&mut self) -> &mut T
+ pub fn get_or_insert_default(&mut self) -> &mut T
where
- T: ~const Default,
+ T: Default,
{
- const fn default<T: ~const Default>() -> T {
- T::default()
- }
-
- self.get_or_insert_with(default)
+ self.get_or_insert_with(T::default)
}
/// Inserts a value computed from `f` into the option if it is [`None`],
@@ -1710,16 +1637,12 @@ impl<T> Option<T> {
/// ```
#[inline]
#[stable(feature = "option_entry", since = "1.20.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn get_or_insert_with<F>(&mut self, f: F) -> &mut T
+ pub fn get_or_insert_with<F>(&mut self, f: F) -> &mut T
where
- F: ~const FnOnce() -> T,
- F: ~const Destruct,
+ F: FnOnce() -> T,
{
- if let None = *self {
- // the compiler isn't smart enough to know that we are not dropping a `T`
- // here and wants us to ensure `T` can be dropped at compile time.
- mem::forget(mem::replace(self, Some(f())))
+ if let None = self {
+ *self = Some(f());
}
// SAFETY: a `None` variant for `self` would have been replaced by a `Some`
@@ -1794,12 +1717,7 @@ impl<T> Option<T> {
/// assert_eq!(x.zip(z), None);
/// ```
#[stable(feature = "option_zip_option", since = "1.46.0")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn zip<U>(self, other: Option<U>) -> Option<(T, U)>
- where
- T: ~const Destruct,
- U: ~const Destruct,
- {
+ pub fn zip<U>(self, other: Option<U>) -> Option<(T, U)> {
match (self, other) {
(Some(a), Some(b)) => Some((a, b)),
_ => None,
@@ -1835,13 +1753,9 @@ impl<T> Option<T> {
/// assert_eq!(x.zip_with(None, Point::new), None);
/// ```
#[unstable(feature = "option_zip", issue = "70086")]
- #[rustc_const_unstable(feature = "const_option_ext", issue = "91930")]
- pub const fn zip_with<U, F, R>(self, other: Option<U>, f: F) -> Option<R>
+ pub fn zip_with<U, F, R>(self, other: Option<U>, f: F) -> Option<R>
where
- F: ~const FnOnce(T, U) -> R,
- F: ~const Destruct,
- T: ~const Destruct,
- U: ~const Destruct,
+ F: FnOnce(T, U) -> R,
{
match (self, other) {
(Some(a), Some(b)) => Some(f(a, b)),
@@ -1867,12 +1781,7 @@ impl<T, U> Option<(T, U)> {
/// ```
#[inline]
#[stable(feature = "unzip_option", since = "1.66.0")]
- #[rustc_const_unstable(feature = "const_option", issue = "67441")]
- pub const fn unzip(self) -> (Option<T>, Option<U>)
- where
- T: ~const Destruct,
- U: ~const Destruct,
- {
+ pub fn unzip(self) -> (Option<T>, Option<U>) {
match self {
Some((a, b)) => (Some(a), Some(b)),
None => (None, None),
@@ -1922,10 +1831,9 @@ impl<T> Option<&T> {
/// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_option_cloned", issue = "91582")]
- pub const fn cloned(self) -> Option<T>
+ pub fn cloned(self) -> Option<T>
where
- T: ~const Clone,
+ T: Clone,
{
match self {
Some(t) => Some(t.clone()),
@@ -1974,10 +1882,9 @@ impl<T> Option<&mut T> {
/// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(since = "1.26.0", feature = "option_ref_mut_cloned")]
- #[rustc_const_unstable(feature = "const_option_cloned", issue = "91582")]
- pub const fn cloned(self) -> Option<T>
+ pub fn cloned(self) -> Option<T>
where
- T: ~const Clone,
+ T: Clone,
{
match self {
Some(t) => Some(t.clone()),
@@ -2030,10 +1937,9 @@ const fn expect_failed(msg: &str) -> ! {
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_clone", issue = "91805")]
-impl<T> const Clone for Option<T>
+impl<T> Clone for Option<T>
where
- T: ~const Clone + ~const Destruct,
+ T: Clone,
{
#[inline]
fn clone(&self) -> Self {
@@ -2053,8 +1959,7 @@ where
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<T> const Default for Option<T> {
+impl<T> Default for Option<T> {
/// Returns [`None`][Option::None].
///
/// # Examples
@@ -2114,8 +2019,7 @@ impl<'a, T> IntoIterator for &'a mut Option<T> {
}
#[stable(since = "1.12.0", feature = "option_from")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for Option<T> {
+impl<T> From<T> for Option<T> {
/// Moves `val` into a new [`Some`].
///
/// # Examples
@@ -2131,8 +2035,7 @@ impl<T> const From<T> for Option<T> {
}
#[stable(feature = "option_ref_from_ref_option", since = "1.30.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<'a, T> const From<&'a Option<T>> for Option<&'a T> {
+impl<'a, T> From<&'a Option<T>> for Option<&'a T> {
/// Converts from `&Option<T>` to `Option<&T>`.
///
/// # Examples
@@ -2159,8 +2062,7 @@ impl<'a, T> const From<&'a Option<T>> for Option<&'a T> {
}
#[stable(feature = "option_ref_from_ref_option", since = "1.30.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<'a, T> const From<&'a mut Option<T>> for Option<&'a mut T> {
+impl<'a, T> From<&'a mut Option<T>> for Option<&'a mut T> {
/// Converts from `&mut Option<T>` to `Option<&mut T>`.
///
/// # Examples
@@ -2507,8 +2409,7 @@ impl<A, V: FromIterator<A>> FromIterator<Option<A>> for Option<V> {
}
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const ops::Try for Option<T> {
+impl<T> ops::Try for Option<T> {
type Output = T;
type Residual = Option<convert::Infallible>;
@@ -2527,8 +2428,7 @@ impl<T> const ops::Try for Option<T> {
}
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const ops::FromResidual for Option<T> {
+impl<T> ops::FromResidual for Option<T> {
#[inline]
fn from_residual(residual: Option<convert::Infallible>) -> Self {
match residual {
@@ -2546,8 +2446,7 @@ impl<T> ops::FromResidual<ops::Yeet<()>> for Option<T> {
}
#[unstable(feature = "try_trait_v2_residual", issue = "91285")]
-#[rustc_const_unstable(feature = "const_try", issue = "74935")]
-impl<T> const ops::Residual<T> for Option<convert::Infallible> {
+impl<T> ops::Residual<T> for Option<convert::Infallible> {
type TryType = Option<T>;
}
diff --git a/library/core/src/panic.rs b/library/core/src/panic.rs
index 8338a5d7e..20be60d35 100644
--- a/library/core/src/panic.rs
+++ b/library/core/src/panic.rs
@@ -28,16 +28,18 @@ pub macro panic_2015 {
$crate::panicking::panic($msg)
),
// Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
- ($msg:expr $(,)?) => (
- $crate::panicking::panic_str($msg)
- ),
+ ($msg:expr $(,)?) => ({
+ $crate::panicking::panic_str($msg);
+ }),
// Special-case the single-argument case for const_panic.
- ("{}", $arg:expr $(,)?) => (
- $crate::panicking::panic_display(&$arg)
- ),
- ($fmt:expr, $($arg:tt)+) => (
- $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+))
- ),
+ ("{}", $arg:expr $(,)?) => ({
+ $crate::panicking::panic_display(&$arg);
+ }),
+ ($fmt:expr, $($arg:tt)+) => ({
+ // Semicolon to prevent temporaries inside the formatting machinery from
+ // being considered alive in the caller after the panic_fmt call.
+ $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+));
+ }),
}
#[doc(hidden)]
@@ -50,12 +52,14 @@ pub macro panic_2021 {
$crate::panicking::panic("explicit panic")
),
// Special-case the single-argument case for const_panic.
- ("{}", $arg:expr $(,)?) => (
- $crate::panicking::panic_display(&$arg)
- ),
- ($($t:tt)+) => (
- $crate::panicking::panic_fmt($crate::const_format_args!($($t)+))
- ),
+ ("{}", $arg:expr $(,)?) => ({
+ $crate::panicking::panic_display(&$arg);
+ }),
+ ($($t:tt)+) => ({
+ // Semicolon to prevent temporaries inside the formatting machinery from
+ // being considered alive in the caller after the panic_fmt call.
+ $crate::panicking::panic_fmt($crate::const_format_args!($($t)+));
+ }),
}
#[doc(hidden)]
@@ -69,9 +73,9 @@ pub macro unreachable_2015 {
),
// Use of `unreachable_display` for non_fmt_panic lint.
// NOTE: the message ("internal error ...") is embedded directly in unreachable_display
- ($msg:expr $(,)?) => (
- $crate::panicking::unreachable_display(&$msg)
- ),
+ ($msg:expr $(,)?) => ({
+ $crate::panicking::unreachable_display(&$msg);
+ }),
($fmt:expr, $($arg:tt)*) => (
$crate::panic!($crate::concat!("internal error: entered unreachable code: ", $fmt), $($arg)*)
),
diff --git a/library/core/src/panic/panic_info.rs b/library/core/src/panic/panic_info.rs
index 06fbe083c..5576adde8 100644
--- a/library/core/src/panic/panic_info.rs
+++ b/library/core/src/panic/panic_info.rs
@@ -134,7 +134,7 @@ impl<'a> PanicInfo<'a> {
/// whose ABI does not support unwinding.
///
/// It is safe for a panic handler to unwind even when this function returns
- /// true, however this will simply cause the panic handler to be called
+ /// false, however this will simply cause the panic handler to be called
/// again.
#[must_use]
#[unstable(feature = "panic_can_unwind", issue = "92988")]
diff --git a/library/core/src/panicking.rs b/library/core/src/panicking.rs
index efeb726ab..81be3fb22 100644
--- a/library/core/src/panicking.rs
+++ b/library/core/src/panicking.rs
@@ -165,7 +165,7 @@ fn panic_bounds_check(index: usize, len: usize) -> ! {
#[cold]
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
#[track_caller]
-#[cfg_attr(not(bootstrap), lang = "panic_misaligned_pointer_dereference")] // needed by codegen for panic on misaligned pointer deref
+#[lang = "panic_misaligned_pointer_dereference"] // needed by codegen for panic on misaligned pointer deref
fn panic_misaligned_pointer_dereference(required: usize, found: usize) -> ! {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
diff --git a/library/core/src/pin.rs b/library/core/src/pin.rs
index c4b89a630..6b319b435 100644
--- a/library/core/src/pin.rs
+++ b/library/core/src/pin.rs
@@ -393,6 +393,8 @@ use crate::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Receiver};
/// value in place, preventing the value referenced by that pointer from being moved
/// unless it implements [`Unpin`].
///
+/// `Pin<P>` is guaranteed to have the same memory layout and ABI as `P`.
+///
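+/// In particular, `Pin<P>` and `P` always have the same size and alignment; this is a small
+/// illustration of the layout part of that guarantee:
+///
+/// ```
+/// use std::mem::{align_of, size_of};
+/// use std::pin::Pin;
+///
+/// assert_eq!(size_of::<Pin<&mut u8>>(), size_of::<&mut u8>());
+/// assert_eq!(align_of::<Pin<&mut u8>>(), align_of::<&mut u8>());
+/// ```
+///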
/// *See the [`pin` module] documentation for an explanation of pinning.*
///
/// [`pin` module]: self
diff --git a/library/core/src/primitive_docs.rs b/library/core/src/primitive_docs.rs
index 3df990e5d..8266e8990 100644
--- a/library/core/src/primitive_docs.rs
+++ b/library/core/src/primitive_docs.rs
@@ -1,8 +1,7 @@
// `library/{std,core}/src/primitive_docs.rs` should have the same contents.
// These are different files so that relative links work properly without
// having to have `CARGO_PKG_NAME` set, but conceptually they should always be the same.
-#[cfg_attr(bootstrap, doc(primitive = "bool"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "bool")]
+#[rustc_doc_primitive = "bool"]
#[doc(alias = "true")]
#[doc(alias = "false")]
/// The boolean type.
@@ -64,8 +63,7 @@
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_bool {}
-#[cfg_attr(bootstrap, doc(primitive = "never"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "never")]
+#[rustc_doc_primitive = "never"]
#[doc(alias = "!")]
//
/// The `!` type, also called "never".
@@ -276,8 +274,7 @@ mod prim_bool {}
#[unstable(feature = "never_type", issue = "35121")]
mod prim_never {}
-#[cfg_attr(bootstrap, doc(primitive = "char"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "char")]
+#[rustc_doc_primitive = "char"]
#[allow(rustdoc::invalid_rust_codeblocks)]
/// A character type.
///
@@ -401,8 +398,7 @@ mod prim_never {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_char {}
-#[cfg_attr(bootstrap, doc(primitive = "unit"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "unit")]
+#[rustc_doc_primitive = "unit"]
#[doc(alias = "(")]
#[doc(alias = ")")]
#[doc(alias = "()")]
@@ -464,8 +460,7 @@ impl Copy for () {
// empty
}
-#[cfg_attr(bootstrap, doc(primitive = "pointer"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "pointer")]
+#[rustc_doc_primitive = "pointer"]
#[doc(alias = "ptr")]
#[doc(alias = "*")]
#[doc(alias = "*const")]
@@ -555,6 +550,7 @@ impl Copy for () {
///
/// ```
/// # #![feature(rustc_private)]
+/// #[allow(unused_extern_crates)]
/// extern crate libc;
///
/// use std::mem;
@@ -581,8 +577,7 @@ impl Copy for () {
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_pointer {}
-#[cfg_attr(bootstrap, doc(primitive = "array"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "array")]
+#[rustc_doc_primitive = "array"]
#[doc(alias = "[]")]
#[doc(alias = "[T;N]")] // unfortunately, rustdoc doesn't have fuzzy search for aliases
#[doc(alias = "[T; N]")]
@@ -616,6 +611,9 @@ mod prim_pointer {}
/// if the element type allows it. As a stopgap, trait implementations are
/// statically generated up to size 32.
///
+/// Arrays of sizes from 1 to 12 (inclusive) implement [`From<Tuple>`], where `Tuple`
+/// is a homogeneous [prim@tuple] of appropriate length.
+///
/// Arrays coerce to [slices (`[T]`)][slice], so a slice method may be called on
/// an array. Indeed, this provides most of the API for working with arrays.
///
@@ -678,6 +676,13 @@ mod prim_pointer {}
/// move_away(roa);
/// ```
///
+/// Arrays can be created from homogeneous tuples of appropriate length:
+///
+/// ```
+/// let tuple: (u32, u32, u32) = (1, 2, 3);
+/// let array: [u32; 3] = tuple.into();
+/// ```
+///
/// # Editions
///
/// Prior to Rust 1.53, arrays did not implement [`IntoIterator`] by value, so the method call
@@ -780,11 +785,11 @@ mod prim_pointer {}
/// [`Borrow`]: borrow::Borrow
/// [`BorrowMut`]: borrow::BorrowMut
/// [slice pattern]: ../reference/patterns.html#slice-patterns
+/// [`From<Tuple>`]: convert::From
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_array {}
-#[cfg_attr(bootstrap, doc(primitive = "slice"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "slice")]
+#[rustc_doc_primitive = "slice"]
#[doc(alias = "[")]
#[doc(alias = "]")]
#[doc(alias = "[]")]
@@ -876,8 +881,7 @@ mod prim_array {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_slice {}
-#[cfg_attr(bootstrap, doc(primitive = "str"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "str")]
+#[rustc_doc_primitive = "str"]
/// String slices.
///
/// *[See also the `std::str` module](crate::str).*
@@ -944,8 +948,7 @@ mod prim_slice {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_str {}
-#[cfg_attr(bootstrap, doc(primitive = "tuple"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "tuple")]
+#[rustc_doc_primitive = "tuple"]
#[doc(alias = "(")]
#[doc(alias = ")")]
#[doc(alias = "()")]
@@ -1009,7 +1012,9 @@ mod prim_str {}
/// * [`Debug`]
/// * [`Default`]
/// * [`Hash`]
+/// * [`From<[T; N]>`][from]
///
+/// [from]: convert::From
/// [`Debug`]: fmt::Debug
/// [`Hash`]: hash::Hash
///
@@ -1060,6 +1065,13 @@ mod prim_str {}
/// assert_eq!(y, 5);
/// ```
///
+/// Homogeneous tuples can be created from arrays of appropriate length:
+///
+/// ```
+/// let array: [u32; 3] = [1, 2, 3];
+/// let tuple: (u32, u32, u32) = array.into();
+/// ```
+///
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_tuple {}
@@ -1088,8 +1100,7 @@ impl<T: Copy> Copy for (T,) {
// empty
}
-#[cfg_attr(bootstrap, doc(primitive = "f32"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "f32")]
+#[rustc_doc_primitive = "f32"]
/// A 32-bit floating point type (specifically, the "binary32" type defined in IEEE 754-2008).
///
/// This type can represent a wide range of decimal numbers, like `3.5`, `27`,
@@ -1155,8 +1166,7 @@ impl<T: Copy> Copy for (T,) {
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_f32 {}
-#[cfg_attr(bootstrap, doc(primitive = "f64"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "f64")]
+#[rustc_doc_primitive = "f64"]
/// A 64-bit floating point type (specifically, the "binary64" type defined in IEEE 754-2008).
///
/// This type is very similar to [`f32`], but has increased
@@ -1171,78 +1181,67 @@ mod prim_f32 {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_f64 {}
-#[cfg_attr(bootstrap, doc(primitive = "i8"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "i8")]
+#[rustc_doc_primitive = "i8"]
//
/// The 8-bit signed integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_i8 {}
-#[cfg_attr(bootstrap, doc(primitive = "i16"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "i16")]
+#[rustc_doc_primitive = "i16"]
//
/// The 16-bit signed integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_i16 {}
-#[cfg_attr(bootstrap, doc(primitive = "i32"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "i32")]
+#[rustc_doc_primitive = "i32"]
//
/// The 32-bit signed integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_i32 {}
-#[cfg_attr(bootstrap, doc(primitive = "i64"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "i64")]
+#[rustc_doc_primitive = "i64"]
//
/// The 64-bit signed integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_i64 {}
-#[cfg_attr(bootstrap, doc(primitive = "i128"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "i128")]
+#[rustc_doc_primitive = "i128"]
//
/// The 128-bit signed integer type.
#[stable(feature = "i128", since = "1.26.0")]
mod prim_i128 {}
-#[cfg_attr(bootstrap, doc(primitive = "u8"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "u8")]
+#[rustc_doc_primitive = "u8"]
//
/// The 8-bit unsigned integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_u8 {}
-#[cfg_attr(bootstrap, doc(primitive = "u16"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "u16")]
+#[rustc_doc_primitive = "u16"]
//
/// The 16-bit unsigned integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_u16 {}
-#[cfg_attr(bootstrap, doc(primitive = "u32"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "u32")]
+#[rustc_doc_primitive = "u32"]
//
/// The 32-bit unsigned integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_u32 {}
-#[cfg_attr(bootstrap, doc(primitive = "u64"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "u64")]
+#[rustc_doc_primitive = "u64"]
//
/// The 64-bit unsigned integer type.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_u64 {}
-#[cfg_attr(bootstrap, doc(primitive = "u128"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "u128")]
+#[rustc_doc_primitive = "u128"]
//
/// The 128-bit unsigned integer type.
#[stable(feature = "i128", since = "1.26.0")]
mod prim_u128 {}
-#[cfg_attr(bootstrap, doc(primitive = "isize"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "isize")]
+#[rustc_doc_primitive = "isize"]
//
/// The pointer-sized signed integer type.
///
@@ -1252,8 +1251,7 @@ mod prim_u128 {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_isize {}
-#[cfg_attr(bootstrap, doc(primitive = "usize"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "usize")]
+#[rustc_doc_primitive = "usize"]
//
/// The pointer-sized unsigned integer type.
///
@@ -1263,8 +1261,7 @@ mod prim_isize {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_usize {}
-#[cfg_attr(bootstrap, doc(primitive = "reference"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "reference")]
+#[rustc_doc_primitive = "reference"]
#[doc(alias = "&")]
#[doc(alias = "&mut")]
//
@@ -1396,8 +1393,7 @@ mod prim_usize {}
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_ref {}
-#[cfg_attr(bootstrap, doc(primitive = "fn"))]
-#[cfg_attr(not(bootstrap), rustc_doc_primitive = "fn")]
+#[rustc_doc_primitive = "fn"]
//
/// Function pointers, like `fn(usize) -> bool`.
///
diff --git a/library/core/src/ptr/alignment.rs b/library/core/src/ptr/alignment.rs
index efe6d4183..bbf7199ff 100644
--- a/library/core/src/ptr/alignment.rs
+++ b/library/core/src/ptr/alignment.rs
@@ -9,8 +9,7 @@ use crate::{cmp, fmt, hash, mem, num};
/// Note that particularly large alignments, while representable in this type,
/// are likely not to be supported by actual allocators and linkers.
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
-#[derive(Copy, Clone, Eq)]
-#[derive_const(PartialEq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(transparent)]
pub struct Alignment(AlignmentEnum);
@@ -170,7 +169,7 @@ impl From<Alignment> for usize {
#[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
-impl const cmp::Ord for Alignment {
+impl cmp::Ord for Alignment {
#[inline]
fn cmp(&self, other: &Self) -> cmp::Ordering {
self.as_nonzero().get().cmp(&other.as_nonzero().get())
@@ -179,7 +178,7 @@ impl const cmp::Ord for Alignment {
#[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[unstable(feature = "ptr_alignment_type", issue = "102070")]
-impl const cmp::PartialOrd for Alignment {
+impl cmp::PartialOrd for Alignment {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
Some(self.cmp(other))
@@ -201,8 +200,7 @@ type AlignmentEnum = AlignmentEnum32;
#[cfg(target_pointer_width = "64")]
type AlignmentEnum = AlignmentEnum64;
-#[derive(Copy, Clone, Eq)]
-#[derive_const(PartialEq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(u16)]
enum AlignmentEnum16 {
_Align1Shl0 = 1 << 0,
@@ -223,8 +221,7 @@ enum AlignmentEnum16 {
_Align1Shl15 = 1 << 15,
}
-#[derive(Copy, Clone, Eq)]
-#[derive_const(PartialEq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
enum AlignmentEnum32 {
_Align1Shl0 = 1 << 0,
@@ -261,8 +258,7 @@ enum AlignmentEnum32 {
_Align1Shl31 = 1 << 31,
}
-#[derive(Copy, Clone, Eq)]
-#[derive_const(PartialEq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(u64)]
enum AlignmentEnum64 {
_Align1Shl0 = 1 << 0,
diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index 839afc57f..6e1e862d3 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -132,8 +132,8 @@ impl<T: ?Sized> *const T {
/// ```
#[unstable(feature = "ptr_to_from_bits", issue = "91126")]
#[deprecated(
- since = "1.67",
- note = "replaced by the `exposed_addr` method, or update your code \
+ since = "1.67.0",
+ note = "replaced by the `expose_addr` method, or update your code \
to follow the strict provenance rules using its APIs"
)]
#[inline(always)]
@@ -161,7 +161,7 @@ impl<T: ?Sized> *const T {
/// ```
#[unstable(feature = "ptr_to_from_bits", issue = "91126")]
#[deprecated(
- since = "1.67",
+ since = "1.67.0",
note = "replaced by the `ptr::from_exposed_addr` function, or update \
your code to follow the strict provenance rules using its APIs"
)]
@@ -264,7 +264,7 @@ impl<T: ?Sized> *const T {
let dest_addr = addr as isize;
let offset = dest_addr.wrapping_sub(self_addr);
- // This is the canonical desugarring of this operation
+ // This is the canonical desugaring of this operation
self.wrapping_byte_offset(offset)
}
@@ -916,8 +916,16 @@ impl<T: ?Sized> *const T {
where
T: Sized,
{
+ #[cfg(bootstrap)]
// SAFETY: the caller must uphold the safety contract for `offset`.
- unsafe { self.offset(count as isize) }
+ unsafe {
+ self.offset(count as isize)
+ }
+ #[cfg(not(bootstrap))]
+ // SAFETY: the caller must uphold the safety contract for `offset`.
+ unsafe {
+ intrinsics::offset(self, count)
+ }
}
/// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
@@ -1187,7 +1195,7 @@ impl<T: ?Sized> *const T {
///
/// [`ptr::read`]: crate::ptr::read()
#[stable(feature = "pointer_methods", since = "1.26.0")]
- #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn read(self) -> T
@@ -1228,7 +1236,7 @@ impl<T: ?Sized> *const T {
///
/// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
#[stable(feature = "pointer_methods", since = "1.26.0")]
- #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn read_unaligned(self) -> T
@@ -1650,11 +1658,10 @@ impl<T> *const [T] {
/// }
/// ```
#[unstable(feature = "slice_ptr_get", issue = "74265")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
- pub const unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
+ pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
where
- I: ~const SliceIndex<[T]>,
+ I: SliceIndex<[T]>,
{
// SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
unsafe { index.get_unchecked(self) }
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 818f1a919..d0cb2f715 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -374,6 +374,7 @@ use crate::hash;
use crate::intrinsics::{
self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
};
+use crate::marker::FnPtr;
use crate::mem::{self, MaybeUninit};
@@ -440,10 +441,18 @@ mod mut_ptr;
///
/// * `to_drop` must be [valid] for both reads and writes.
///
-/// * `to_drop` must be properly aligned.
+/// * `to_drop` must be properly aligned, even if `T` has size 0.
///
-/// * The value `to_drop` points to must be valid for dropping, which may mean it must uphold
-/// additional invariants - this is type-dependent.
+/// * `to_drop` must be non-null, even if `T` has size 0.
+///
+/// * The value `to_drop` points to must be valid for dropping, which may mean
+/// it must uphold additional invariants. These invariants depend on the type
+/// of the value being dropped. For instance, when dropping a `Box`, the box's
+/// pointer to the heap must be valid.
+///
+/// * While `drop_in_place` is executing, the only way to access parts of
+/// `to_drop` is through the `&mut self` references supplied to the
+/// `Drop::drop` methods that `drop_in_place` invokes.
///
/// Additionally, if `T` is not [`Copy`], using the pointed-to value after
/// calling `drop_in_place` can cause undefined behavior. Note that `*to_drop =
@@ -451,8 +460,6 @@ mod mut_ptr;
/// again. [`write()`] can be used to overwrite data without causing it to be
/// dropped.
///
-/// Note that even if `T` has size `0`, the pointer must be non-null and properly aligned.
-///
/// [valid]: self#safety
///
/// # Examples
@@ -1132,7 +1139,8 @@ pub const unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
/// [valid]: self#safety
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+#[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
+#[rustc_allow_const_fn_unstable(const_mut_refs, const_maybe_uninit_as_mut_ptr)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn read<T>(src: *const T) -> T {
// It would be semantically correct to implement this via `copy_nonoverlapping`
@@ -1167,26 +1175,7 @@ pub const unsafe fn read<T>(src: *const T) -> T {
"ptr::read requires that the pointer argument is aligned and non-null",
[T](src: *const T) => is_aligned_and_not_null(src)
);
-
- #[cfg(bootstrap)]
- {
- // We are calling the intrinsics directly to avoid function calls in the
- // generated code as `intrinsics::copy_nonoverlapping` is a wrapper function.
- extern "rust-intrinsic" {
- #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
- fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
- }
-
- // `src` cannot overlap `tmp` because `tmp` was just allocated on
- // the stack as a separate allocated object.
- let mut tmp = MaybeUninit::<T>::uninit();
- copy_nonoverlapping(src, tmp.as_mut_ptr(), 1);
- tmp.assume_init()
- }
- #[cfg(not(bootstrap))]
- {
- crate::intrinsics::read_via_copy(src)
- }
+ crate::intrinsics::read_via_copy(src)
}
}
@@ -1267,7 +1256,8 @@ pub const unsafe fn read<T>(src: *const T) -> T {
/// ```
#[inline]
#[stable(feature = "ptr_unaligned", since = "1.17.0")]
-#[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+#[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
+#[rustc_allow_const_fn_unstable(const_mut_refs, const_maybe_uninit_as_mut_ptr)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn read_unaligned<T>(src: *const T) -> T {
let mut tmp = MaybeUninit::<T>::uninit();
@@ -1367,13 +1357,13 @@ pub const unsafe fn read_unaligned<T>(src: *const T) -> T {
#[rustc_const_unstable(feature = "const_ptr_write", issue = "86302")]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn write<T>(dst: *mut T, src: T) {
- // We are calling the intrinsics directly to avoid function calls in the generated code
- // as `intrinsics::copy_nonoverlapping` is a wrapper function.
- extern "rust-intrinsic" {
- #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.63.0")]
- #[rustc_nounwind]
- fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
- }
+ // Semantically, it would be fine for this to be implemented as a
+ // `copy_nonoverlapping` and appropriate drop suppression of `src`.
+
+ // However, implementing via that currently produces more MIR than is ideal.
+ // Using an intrinsic keeps it down to just the simple `*dst = move src` in
+ // MIR (11 statements shorter, at the time of writing), and also allows
+ // `src` to stay an SSA value in codegen_ssa, rather than a memory one.
// SAFETY: the caller must guarantee that `dst` is valid for writes.
// `dst` cannot overlap `src` because the caller has mutable access
@@ -1383,8 +1373,7 @@ pub const unsafe fn write<T>(dst: *mut T, src: T) {
"ptr::write requires that the pointer argument is aligned and non-null",
[T](dst: *mut T) => is_aligned_and_not_null(dst)
);
- copy_nonoverlapping(&src as *const T, dst, 1);
- intrinsics::forget(src);
+ intrinsics::write_via_move(dst, src)
}
}
@@ -1651,8 +1640,8 @@ pub(crate) const unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usiz
// FIXME(#75598): Direct use of these intrinsics improves codegen significantly at opt-level <=
// 1, where the method versions of these operations are not inlined.
use intrinsics::{
- cttz_nonzero, exact_div, mul_with_overflow, unchecked_rem, unchecked_shl, unchecked_shr,
- unchecked_sub, wrapping_add, wrapping_mul, wrapping_sub,
+ assume, cttz_nonzero, exact_div, mul_with_overflow, unchecked_rem, unchecked_shl,
+ unchecked_shr, unchecked_sub, wrapping_add, wrapping_mul, wrapping_sub,
};
/// Calculate multiplicative modular inverse of `x` modulo `m`.
@@ -1743,12 +1732,18 @@ pub(crate) const unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usiz
// in a branch-free way and then bitwise-OR it with whatever result the `-p mod a`
// computation produces.
+ let aligned_address = wrapping_add(addr, a_minus_one) & wrapping_sub(0, a);
+ let byte_offset = wrapping_sub(aligned_address, addr);
+ // FIXME: Remove the assume after <https://github.com/llvm/llvm-project/issues/62502>
+ // SAFETY: Masking by `-a` can only affect the low bits, and thus cannot have reduced
+ // the value by more than `a-1`, so even though the intermediate values might have
+ // wrapped, the byte_offset is always in `[0, a)`.
+ unsafe { assume(byte_offset < a) };
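+ // Worked example (illustrative only): with `a = 8` and `addr = 21`, `aligned_address`
+ // is `(21 + 7) & !7 = 24`, so `byte_offset = 24 - 21 = 3`, which indeed lies in `[0, 8)`.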
+
// SAFETY: `stride == 0` case has been handled by the special case above.
let addr_mod_stride = unsafe { unchecked_rem(addr, stride) };
return if addr_mod_stride == 0 {
- let aligned_address = wrapping_add(addr, a_minus_one) & wrapping_sub(0, a);
- let byte_offset = wrapping_sub(aligned_address, addr);
// SAFETY: `stride` is non-zero. This is guaranteed to divide exactly as well, because
// addr has been verified to be aligned to the original type’s alignment requirements.
unsafe { exact_div(byte_offset, stride) }
@@ -1764,7 +1759,12 @@ pub(crate) const unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usiz
// miracles, given the situations this case has to deal with.
// SAFETY: a is power-of-two hence non-zero. stride == 0 case is handled above.
- let gcdpow = unsafe { cttz_nonzero(stride).min(cttz_nonzero(a)) };
+ // FIXME(const-hack) replace with min
+ let gcdpow = unsafe {
+ let x = cttz_nonzero(stride);
+ let y = cttz_nonzero(a);
+ if x < y { x } else { y }
+ };
// SAFETY: gcdpow has an upper-bound that’s at most the number of bits in a usize.
let gcd = unsafe { unchecked_shl(1usize, gcdpow) };
// SAFETY: gcd is always greater or equal to 1.
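// A standalone sketch of the branch-free "`-addr` mod `a`" computation used
// above: round `addr` up to the next multiple of the power-of-two `a` with
// wrapping arithmetic, then subtract. Even if the intermediate sum wraps, the
// resulting byte offset stays in `[0, a)`, which is what the new `assume`
// communicates to LLVM. The numbers below are arbitrary illustrations.
fn byte_offset_to_align(addr: usize, a: usize) -> usize {
    debug_assert!(a.is_power_of_two());
    let aligned_address = addr.wrapping_add(a - 1) & 0usize.wrapping_sub(a);
    aligned_address.wrapping_sub(addr)
}

fn main() {
    assert_eq!(byte_offset_to_align(0x1003, 8), 5);
    assert_eq!(byte_offset_to_align(0x1000, 8), 0);
    // Wrapping near the top of the address space still yields an in-range offset.
    assert_eq!(byte_offset_to_align(usize::MAX, 2), 1);
}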
@@ -1892,205 +1892,52 @@ pub fn hash<T: ?Sized, S: hash::Hasher>(hashee: *const T, into: &mut S) {
hashee.hash(into);
}
-#[cfg(bootstrap)]
-mod old_fn_ptr_impl {
- use super::*;
- // If this is a unary fn pointer, it adds a doc comment.
- // Otherwise, it hides the docs entirely.
- macro_rules! maybe_fnptr_doc {
- (@ #[$meta:meta] $item:item) => {
- #[doc(hidden)]
- #[$meta]
- $item
- };
- ($a:ident @ #[$meta:meta] $item:item) => {
- #[doc(fake_variadic)]
- #[doc = "This trait is implemented for function pointers with up to twelve arguments."]
- #[$meta]
- $item
- };
- ($a:ident $($rest_a:ident)+ @ #[$meta:meta] $item:item) => {
- #[doc(hidden)]
- #[$meta]
- $item
- };
- }
-
- // FIXME(strict_provenance_magic): function pointers have buggy codegen that
- // necessitates casting to a usize to get the backend to do the right thing.
- // for now I will break AVR to silence *a billion* lints. We should probably
- // have a proper "opaque function pointer type" to handle this kind of thing.
-
- // Impls for function pointers
- macro_rules! fnptr_impls_safety_abi {
- ($FnTy: ty, $($Arg: ident),*) => {
- fnptr_impls_safety_abi! { #[stable(feature = "fnptr_impls", since = "1.4.0")] $FnTy, $($Arg),* }
- };
- (@c_unwind $FnTy: ty, $($Arg: ident),*) => {
- fnptr_impls_safety_abi! { #[unstable(feature = "c_unwind", issue = "74990")] $FnTy, $($Arg),* }
- };
- (#[$meta:meta] $FnTy: ty, $($Arg: ident),*) => {
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> PartialEq for $FnTy {
- #[inline]
- fn eq(&self, other: &Self) -> bool {
- *self as usize == *other as usize
- }
- }
- }
-
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> Eq for $FnTy {}
- }
-
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> PartialOrd for $FnTy {
- #[inline]
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- (*self as usize).partial_cmp(&(*other as usize))
- }
- }
- }
-
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> Ord for $FnTy {
- #[inline]
- fn cmp(&self, other: &Self) -> Ordering {
- (*self as usize).cmp(&(*other as usize))
- }
- }
- }
-
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> hash::Hash for $FnTy {
- fn hash<HH: hash::Hasher>(&self, state: &mut HH) {
- state.write_usize(*self as usize)
- }
- }
- }
-
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> fmt::Pointer for $FnTy {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::pointer_fmt_inner(*self as usize, f)
- }
- }
- }
-
- maybe_fnptr_doc! {
- $($Arg)* @
- #[$meta]
- impl<Ret, $($Arg),*> fmt::Debug for $FnTy {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::pointer_fmt_inner(*self as usize, f)
- }
- }
- }
- }
- }
-
- macro_rules! fnptr_impls_args {
- ($($Arg: ident),+) => {
- fnptr_impls_safety_abi! { extern "Rust" fn($($Arg),+) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { extern "C" fn($($Arg),+) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { extern "C" fn($($Arg),+ , ...) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { @c_unwind extern "C-unwind" fn($($Arg),+) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { @c_unwind extern "C-unwind" fn($($Arg),+ , ...) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { unsafe extern "Rust" fn($($Arg),+) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { unsafe extern "C" fn($($Arg),+) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { unsafe extern "C" fn($($Arg),+ , ...) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { @c_unwind unsafe extern "C-unwind" fn($($Arg),+) -> Ret, $($Arg),+ }
- fnptr_impls_safety_abi! { @c_unwind unsafe extern "C-unwind" fn($($Arg),+ , ...) -> Ret, $($Arg),+ }
- };
- () => {
- // No variadic functions with 0 parameters
- fnptr_impls_safety_abi! { extern "Rust" fn() -> Ret, }
- fnptr_impls_safety_abi! { extern "C" fn() -> Ret, }
- fnptr_impls_safety_abi! { @c_unwind extern "C-unwind" fn() -> Ret, }
- fnptr_impls_safety_abi! { unsafe extern "Rust" fn() -> Ret, }
- fnptr_impls_safety_abi! { unsafe extern "C" fn() -> Ret, }
- fnptr_impls_safety_abi! { @c_unwind unsafe extern "C-unwind" fn() -> Ret, }
- };
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> PartialEq for F {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.addr() == other.addr()
}
-
- fnptr_impls_args! {}
- fnptr_impls_args! { T }
- fnptr_impls_args! { A, B }
- fnptr_impls_args! { A, B, C }
- fnptr_impls_args! { A, B, C, D }
- fnptr_impls_args! { A, B, C, D, E }
- fnptr_impls_args! { A, B, C, D, E, F }
- fnptr_impls_args! { A, B, C, D, E, F, G }
- fnptr_impls_args! { A, B, C, D, E, F, G, H }
- fnptr_impls_args! { A, B, C, D, E, F, G, H, I }
- fnptr_impls_args! { A, B, C, D, E, F, G, H, I, J }
- fnptr_impls_args! { A, B, C, D, E, F, G, H, I, J, K }
- fnptr_impls_args! { A, B, C, D, E, F, G, H, I, J, K, L }
}
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> Eq for F {}
-#[cfg(not(bootstrap))]
-mod new_fn_ptr_impl {
- use super::*;
- use crate::marker::FnPtr;
-
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> PartialEq for F {
- #[inline]
- fn eq(&self, other: &Self) -> bool {
- self.addr() == other.addr()
- }
- }
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> Eq for F {}
-
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> PartialOrd for F {
- #[inline]
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- self.addr().partial_cmp(&other.addr())
- }
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> PartialOrd for F {
+ #[inline]
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.addr().partial_cmp(&other.addr())
}
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> Ord for F {
- #[inline]
- fn cmp(&self, other: &Self) -> Ordering {
- self.addr().cmp(&other.addr())
- }
+}
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> Ord for F {
+ #[inline]
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.addr().cmp(&other.addr())
}
+}
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> hash::Hash for F {
- fn hash<HH: hash::Hasher>(&self, state: &mut HH) {
- state.write_usize(self.addr() as _)
- }
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> hash::Hash for F {
+ fn hash<HH: hash::Hasher>(&self, state: &mut HH) {
+ state.write_usize(self.addr() as _)
}
+}
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> fmt::Pointer for F {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::pointer_fmt_inner(self.addr() as _, f)
- }
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> fmt::Pointer for F {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::pointer_fmt_inner(self.addr() as _, f)
}
+}
- #[stable(feature = "fnptr_impls", since = "1.4.0")]
- impl<F: FnPtr> fmt::Debug for F {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::pointer_fmt_inner(self.addr() as _, f)
- }
+#[stable(feature = "fnptr_impls", since = "1.4.0")]
+impl<F: FnPtr> fmt::Debug for F {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::pointer_fmt_inner(self.addr() as _, f)
}
}
+
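// What the blanket `impl<F: FnPtr>` blocks above provide for ordinary function
// pointers, now routed through `FnPtr::addr()` instead of `as usize` casts:
// equality, ordering, hashing and pointer-style formatting. A small usage
// sketch of the long-stable behavior:
fn add_one(x: i32) -> i32 { x + 1 }
fn double(x: i32) -> i32 { x * 2 }

fn main() {
    let f: fn(i32) -> i32 = add_one;
    let g: fn(i32) -> i32 = double;
    // PartialEq/Eq compare the pointers' addresses.
    assert_eq!(f, f);
    assert_ne!(f, g);
    // Ord gives a total order over those addresses (which one sorts first is
    // not specified and not meaningful).
    let _ = f.cmp(&g);
    // fmt::Pointer and fmt::Debug both print the address.
    println!("{:p} vs {:?}", f, g);
}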
/// Create a `const` raw pointer to a place, without creating an intermediate reference.
///
/// Creating a reference with `&`/`&mut` is only allowed if the pointer is properly aligned
@@ -2121,7 +1968,7 @@ mod new_fn_ptr_impl {
/// assert_eq!(unsafe { raw_f2.read_unaligned() }, 2);
/// ```
///
-/// See [`addr_of_mut`] for how to create a pointer to unininitialized data.
+/// See [`addr_of_mut`] for how to create a pointer to uninitialized data.
/// Doing that with `addr_of` would not make much sense since one could only
/// read the data, and that would be Undefined Behavior.
#[stable(feature = "raw_ref_macros", since = "1.51.0")]
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index ece5244e9..2fe5164c3 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -138,8 +138,8 @@ impl<T: ?Sized> *mut T {
/// ```
#[unstable(feature = "ptr_to_from_bits", issue = "91126")]
#[deprecated(
- since = "1.67",
- note = "replaced by the `exposed_addr` method, or update your code \
+ since = "1.67.0",
+ note = "replaced by the `expose_addr` method, or update your code \
to follow the strict provenance rules using its APIs"
)]
#[inline(always)]
@@ -167,7 +167,7 @@ impl<T: ?Sized> *mut T {
/// ```
#[unstable(feature = "ptr_to_from_bits", issue = "91126")]
#[deprecated(
- since = "1.67",
+ since = "1.67.0",
note = "replaced by the `ptr::from_exposed_addr_mut` function, or \
update your code to follow the strict provenance rules using its APIs"
)]
@@ -270,7 +270,7 @@ impl<T: ?Sized> *mut T {
let dest_addr = addr as isize;
let offset = dest_addr.wrapping_sub(self_addr);
- // This is the canonical desugarring of this operation
+ // This is the canonical desugaring of this operation
self.wrapping_byte_offset(offset)
}
@@ -473,10 +473,20 @@ impl<T: ?Sized> *mut T {
where
T: Sized,
{
+ #[cfg(bootstrap)]
// SAFETY: the caller must uphold the safety contract for `offset`.
// The obtained pointer is valid for writes since the caller must
// guarantee that it points to the same allocated object as `self`.
- unsafe { intrinsics::offset(self, count) as *mut T }
+ unsafe {
+ intrinsics::offset(self, count) as *mut T
+ }
+ #[cfg(not(bootstrap))]
+ // SAFETY: the caller must uphold the safety contract for `offset`.
+ // The obtained pointer is valid for writes since the caller must
+ // guarantee that it points to the same allocated object as `self`.
+ unsafe {
+ intrinsics::offset(self, count)
+ }
}
/// Calculates the offset from a pointer in bytes.
@@ -1016,8 +1026,16 @@ impl<T: ?Sized> *mut T {
where
T: Sized,
{
+ #[cfg(bootstrap)]
+ // SAFETY: the caller must uphold the safety contract for `offset`.
+ unsafe {
+ self.offset(count as isize)
+ }
+ #[cfg(not(bootstrap))]
// SAFETY: the caller must uphold the safety contract for `offset`.
- unsafe { self.offset(count as isize) }
+ unsafe {
+ intrinsics::offset(self, count)
+ }
}
/// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
@@ -1287,7 +1305,7 @@ impl<T: ?Sized> *mut T {
///
/// [`ptr::read`]: crate::ptr::read()
#[stable(feature = "pointer_methods", since = "1.26.0")]
- #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn read(self) -> T
@@ -1328,7 +1346,7 @@ impl<T: ?Sized> *mut T {
///
/// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
#[stable(feature = "pointer_methods", since = "1.26.0")]
- #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
+ #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
#[inline(always)]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub const unsafe fn read_unaligned(self) -> T
@@ -2036,11 +2054,10 @@ impl<T> *mut [T] {
/// }
/// ```
#[unstable(feature = "slice_ptr_get", issue = "74265")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline(always)]
- pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
+ pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
where
- I: ~const SliceIndex<[T]>,
+ I: SliceIndex<[T]>,
{
// SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
unsafe { index.get_unchecked_mut(self) }
diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs
index 13f56c0ce..b492d2f07 100644
--- a/library/core/src/ptr/non_null.rs
+++ b/library/core/src/ptr/non_null.rs
@@ -449,6 +449,19 @@ impl<T: ?Sized> NonNull<T> {
// SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
unsafe { NonNull::new_unchecked(self.as_ptr() as *mut U) }
}
+
+ /// See [`pointer::add`] for semantics and safety requirements.
+ #[inline]
+ pub(crate) const unsafe fn add(self, delta: usize) -> Self
+ where
+ T: Sized,
+ {
+ // SAFETY: We require that the delta stays in-bounds of the object, and
+ // thus it cannot become null, as that would require wrapping the
+ // address space, which no legal objects are allowed to do.
+ // And the caller promised the `delta` is sound to add.
+ unsafe { NonNull { pointer: self.pointer.add(delta) } }
+ }
}
impl<T> NonNull<[T]> {
@@ -676,11 +689,10 @@ impl<T> NonNull<[T]> {
/// }
/// ```
#[unstable(feature = "slice_ptr_get", issue = "74265")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
- pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
+ pub unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
where
- I: ~const SliceIndex<[T]>,
+ I: SliceIndex<[T]>,
{
// SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
// As a consequence, the resulting pointer cannot be null.
@@ -689,8 +701,7 @@ impl<T> NonNull<[T]> {
}
#[stable(feature = "nonnull", since = "1.25.0")]
-#[rustc_const_unstable(feature = "const_clone", issue = "91805")]
-impl<T: ?Sized> const Clone for NonNull<T> {
+impl<T: ?Sized> Clone for NonNull<T> {
#[inline(always)]
fn clone(&self) -> Self {
*self
@@ -756,8 +767,7 @@ impl<T: ?Sized> hash::Hash for NonNull<T> {
}
#[unstable(feature = "ptr_internals", issue = "none")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T: ?Sized> const From<Unique<T>> for NonNull<T> {
+impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
#[inline]
fn from(unique: Unique<T>) -> Self {
// SAFETY: A Unique pointer cannot be null, so the conditions for
@@ -767,8 +777,7 @@ impl<T: ?Sized> const From<Unique<T>> for NonNull<T> {
}
#[stable(feature = "nonnull", since = "1.25.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T: ?Sized> const From<&mut T> for NonNull<T> {
+impl<T: ?Sized> From<&mut T> for NonNull<T> {
/// Converts a `&mut T` to a `NonNull<T>`.
///
/// This conversion is safe and infallible since references cannot be null.
@@ -780,8 +789,7 @@ impl<T: ?Sized> const From<&mut T> for NonNull<T> {
}
#[stable(feature = "nonnull", since = "1.25.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T: ?Sized> const From<&T> for NonNull<T> {
+impl<T: ?Sized> From<&T> for NonNull<T> {
/// Converts a `&T` to a `NonNull<T>`.
///
/// This conversion is safe and infallible since references cannot be null.
diff --git a/library/core/src/ptr/unique.rs b/library/core/src/ptr/unique.rs
index 64616142b..a853f15ed 100644
--- a/library/core/src/ptr/unique.rs
+++ b/library/core/src/ptr/unique.rs
@@ -70,7 +70,8 @@ impl<T: Sized> Unique<T> {
#[must_use]
#[inline]
pub const fn dangling() -> Self {
- Self::from(NonNull::dangling())
+ // FIXME(const-hack) replace with `From`
+ Unique { pointer: NonNull::dangling(), _marker: PhantomData }
}
}
@@ -134,13 +135,14 @@ impl<T: ?Sized> Unique<T> {
#[must_use = "`self` will be dropped if the result is not used"]
#[inline]
pub const fn cast<U>(self) -> Unique<U> {
- Unique::from(self.pointer.cast())
+ // FIXME(const-hack): replace with `From`
+ // SAFETY: is `NonNull`
+ unsafe { Unique::new_unchecked(self.pointer.cast().as_ptr()) }
}
}
#[unstable(feature = "ptr_internals", issue = "none")]
-#[rustc_const_unstable(feature = "const_clone", issue = "91805")]
-impl<T: ?Sized> const Clone for Unique<T> {
+impl<T: ?Sized> Clone for Unique<T> {
#[inline]
fn clone(&self) -> Self {
*self
@@ -171,7 +173,7 @@ impl<T: ?Sized> fmt::Pointer for Unique<T> {
}
#[unstable(feature = "ptr_internals", issue = "none")]
-impl<T: ?Sized> const From<&mut T> for Unique<T> {
+impl<T: ?Sized> From<&mut T> for Unique<T> {
/// Converts a `&mut T` to a `Unique<T>`.
///
/// This conversion is infallible since references cannot be null.
@@ -182,7 +184,7 @@ impl<T: ?Sized> const From<&mut T> for Unique<T> {
}
#[unstable(feature = "ptr_internals", issue = "none")]
-impl<T: ?Sized> const From<NonNull<T>> for Unique<T> {
+impl<T: ?Sized> From<NonNull<T>> for Unique<T> {
/// Converts a `NonNull<T>` to a `Unique<T>`.
///
/// This conversion is infallible since `NonNull` cannot be null.
diff --git a/library/core/src/result.rs b/library/core/src/result.rs
index c48230fb8..1ee270f4c 100644
--- a/library/core/src/result.rs
+++ b/library/core/src/result.rs
@@ -489,7 +489,6 @@
#![stable(feature = "rust1", since = "1.0.0")]
use crate::iter::{self, FromIterator, FusedIterator, TrustedLen};
-use crate::marker::Destruct;
use crate::ops::{self, ControlFlow, Deref, DerefMut};
use crate::{convert, fmt, hint};
@@ -629,16 +628,10 @@ impl<T, E> Result<T, E> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_result_drop", issue = "92384")]
- pub const fn ok(self) -> Option<T>
- where
- E: ~const Destruct,
- {
+ pub fn ok(self) -> Option<T> {
match self {
Ok(x) => Some(x),
- // FIXME: ~const Drop doesn't quite work right yet
- #[allow(unused_variables)]
- Err(x) => None,
+ Err(_) => None,
}
}
@@ -658,15 +651,9 @@ impl<T, E> Result<T, E> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_result_drop", issue = "92384")]
- pub const fn err(self) -> Option<E>
- where
- T: ~const Destruct,
- {
+ pub fn err(self) -> Option<E> {
match self {
- // FIXME: ~const Drop doesn't quite work right yet
- #[allow(unused_variables)]
- Ok(x) => None,
+ Ok(_) => None,
Err(x) => Some(x),
}
}
@@ -1287,18 +1274,10 @@ impl<T, E> Result<T, E> {
/// assert_eq!(x.and(y), Ok("different result type"));
/// ```
#[inline]
- #[rustc_const_unstable(feature = "const_result_drop", issue = "92384")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub const fn and<U>(self, res: Result<U, E>) -> Result<U, E>
- where
- T: ~const Destruct,
- U: ~const Destruct,
- E: ~const Destruct,
- {
+ pub fn and<U>(self, res: Result<U, E>) -> Result<U, E> {
match self {
- // FIXME: ~const Drop doesn't quite work right yet
- #[allow(unused_variables)]
- Ok(x) => res,
+ Ok(_) => res,
Err(e) => Err(e),
}
}
@@ -1370,19 +1349,11 @@ impl<T, E> Result<T, E> {
/// assert_eq!(x.or(y), Ok(2));
/// ```
#[inline]
- #[rustc_const_unstable(feature = "const_result_drop", issue = "92384")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub const fn or<F>(self, res: Result<T, F>) -> Result<T, F>
- where
- T: ~const Destruct,
- E: ~const Destruct,
- F: ~const Destruct,
- {
+ pub fn or<F>(self, res: Result<T, F>) -> Result<T, F> {
match self {
Ok(v) => Ok(v),
- // FIXME: ~const Drop doesn't quite work right yet
- #[allow(unused_variables)]
- Err(e) => res,
+ Err(_) => res,
}
}
@@ -1430,18 +1401,11 @@ impl<T, E> Result<T, E> {
/// assert_eq!(x.unwrap_or(default), default);
/// ```
#[inline]
- #[rustc_const_unstable(feature = "const_result_drop", issue = "92384")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub const fn unwrap_or(self, default: T) -> T
- where
- T: ~const Destruct,
- E: ~const Destruct,
- {
+ pub fn unwrap_or(self, default: T) -> T {
match self {
Ok(t) => t,
- // FIXME: ~const Drop doesn't quite work right yet
- #[allow(unused_variables)]
- Err(e) => default,
+ Err(_) => default,
}
}
@@ -1704,11 +1668,10 @@ fn unwrap_failed<T>(_msg: &str, _error: &T) -> ! {
/////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_clone", issue = "91805")]
-impl<T, E> const Clone for Result<T, E>
+impl<T, E> Clone for Result<T, E>
where
- T: ~const Clone + ~const Destruct,
- E: ~const Clone + ~const Destruct,
+ T: Clone,
+ E: Clone,
{
#[inline]
fn clone(&self) -> Self {
@@ -1971,8 +1934,7 @@ impl<A, E, V: FromIterator<A>> FromIterator<Result<A, E>> for Result<V, E> {
}
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T, E> const ops::Try for Result<T, E> {
+impl<T, E> ops::Try for Result<T, E> {
type Output = T;
type Residual = Result<convert::Infallible, E>;
@@ -1991,10 +1953,7 @@ impl<T, E> const ops::Try for Result<T, E> {
}
#[unstable(feature = "try_trait_v2", issue = "84277")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T, E, F: ~const From<E>> const ops::FromResidual<Result<convert::Infallible, E>>
- for Result<T, F>
-{
+impl<T, E, F: From<E>> ops::FromResidual<Result<convert::Infallible, E>> for Result<T, F> {
#[inline]
#[track_caller]
fn from_residual(residual: Result<convert::Infallible, E>) -> Self {
@@ -2013,7 +1972,6 @@ impl<T, E, F: From<E>> ops::FromResidual<ops::Yeet<E>> for Result<T, F> {
}
#[unstable(feature = "try_trait_v2_residual", issue = "91285")]
-#[rustc_const_unstable(feature = "const_try", issue = "74935")]
-impl<T, E> const ops::Residual<T> for Result<convert::Infallible, E> {
+impl<T, E> ops::Residual<T> for Result<convert::Infallible, E> {
type TryType = Result<T, E>;
}
diff --git a/library/core/src/slice/ascii.rs b/library/core/src/slice/ascii.rs
index 5e5399acc..f3311f76a 100644
--- a/library/core/src/slice/ascii.rs
+++ b/library/core/src/slice/ascii.rs
@@ -10,12 +10,43 @@ use crate::ops;
impl [u8] {
/// Checks if all bytes in this slice are within the ASCII range.
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+ #[rustc_const_unstable(feature = "const_slice_is_ascii", issue = "111090")]
#[must_use]
#[inline]
- pub fn is_ascii(&self) -> bool {
+ pub const fn is_ascii(&self) -> bool {
is_ascii(self)
}
+ /// If this slice [`is_ascii`](Self::is_ascii), returns it as a slice of
+ /// [ASCII characters](`ascii::Char`), otherwise returns `None`.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[must_use]
+ #[inline]
+ pub const fn as_ascii(&self) -> Option<&[ascii::Char]> {
+ if self.is_ascii() {
+ // SAFETY: Just checked that it's ASCII
+ Some(unsafe { self.as_ascii_unchecked() })
+ } else {
+ None
+ }
+ }
+
+ /// Converts this slice of bytes into a slice of ASCII characters,
+ /// without checking whether they're valid.
+ ///
+ /// # Safety
+ ///
+ /// Every byte in the slice must be in `0..=127`, or else this is UB.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[must_use]
+ #[inline]
+ pub const unsafe fn as_ascii_unchecked(&self) -> &[ascii::Char] {
+ let byte_ptr: *const [u8] = self;
+ let ascii_ptr = byte_ptr as *const [ascii::Char];
+ // SAFETY: The caller promised all the bytes are ASCII
+ unsafe { &*ascii_ptr }
+ }
+
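// A usage sketch of the new accessors above. Both are gated behind the
// unstable `ascii_char` feature (tracking issue 110998), so this only runs on
// a nightly toolchain; the point is simply that an all-ASCII slice converts
// losslessly and anything with a high bit set yields `None`.
#![feature(ascii_char)]

fn main() {
    use core::ascii;

    let ok: &[u8] = b"plain ascii";
    let chars: &[ascii::Char] = ok.as_ascii().unwrap();
    assert_eq!(chars.len(), ok.len());

    let not_ok: &[u8] = b"caf\xC3\xA9";
    assert!(not_ok.as_ascii().is_none());
}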
/// Checks that two slices are an ASCII case-insensitive match.
///
/// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
@@ -232,11 +263,29 @@ impl<'a> fmt::Debug for EscapeAscii<'a> {
/// Returns `true` if any byte in the word `v` is nonascii (>= 128). Snarfed
/// from `../str/mod.rs`, which does something similar for utf8 validation.
#[inline]
-fn contains_nonascii(v: usize) -> bool {
+const fn contains_nonascii(v: usize) -> bool {
const NONASCII_MASK: usize = usize::repeat_u8(0x80);
(NONASCII_MASK & v) != 0
}
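// The word-at-a-time test above rests on one fact: a byte is non-ASCII exactly
// when its high bit is set, so checking a whole `usize` of bytes is a single
// mask-and-compare. A standalone sketch of the same mask (the library builds
// it with the internal `usize::repeat_u8(0x80)` helper):
const NONASCII_MASK: usize = usize::MAX / 0xFF * 0x80; // 0x8080...80

fn word_has_nonascii(word: usize) -> bool {
    word & NONASCII_MASK != 0
}

fn main() {
    let all_ascii = usize::from_ne_bytes([b'A'; core::mem::size_of::<usize>()]);
    let mut bytes = all_ascii.to_ne_bytes();
    bytes[0] = 0xC3; // any byte >= 0x80 trips the mask
    assert!(!word_has_nonascii(all_ascii));
    assert!(word_has_nonascii(usize::from_ne_bytes(bytes)));
}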
+/// ASCII test *without* the chunk-at-a-time optimizations.
+///
+/// This is carefully structured to produce nice small code -- it's smaller in
+/// `-O` than what the "obvious" ways produce under `-C opt-level=s`. If you
+/// touch it, be sure to run (and update if needed) the assembly test.
+#[unstable(feature = "str_internals", issue = "none")]
+#[doc(hidden)]
+#[inline]
+pub const fn is_ascii_simple(mut bytes: &[u8]) -> bool {
+ while let [rest @ .., last] = bytes {
+ if !last.is_ascii() {
+ break;
+ }
+ bytes = rest;
+ }
+ bytes.is_empty()
+}
+
/// Optimized ASCII test that will use usize-at-a-time operations instead of
/// byte-at-a-time operations (when possible).
///
@@ -250,7 +299,7 @@ fn contains_nonascii(v: usize) -> bool {
/// If any of these loads produces something for which `contains_nonascii`
/// (above) returns true, then we know the answer is false.
#[inline]
-fn is_ascii(s: &[u8]) -> bool {
+const fn is_ascii(s: &[u8]) -> bool {
const USIZE_SIZE: usize = mem::size_of::<usize>();
let len = s.len();
@@ -262,7 +311,7 @@ fn is_ascii(s: &[u8]) -> bool {
// We also do this for architectures where `size_of::<usize>()` isn't
// sufficient alignment for `usize`, because it's a weird edge case.
if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
- return s.iter().all(|b| b.is_ascii());
+ return is_ascii_simple(s);
}
// We always read the first word unaligned, which means `align_offset` is
@@ -291,18 +340,26 @@ fn is_ascii(s: &[u8]) -> bool {
// Paranoia check about alignment, since we're about to do a bunch of
// unaligned loads. In practice this should be impossible barring a bug in
// `align_offset` though.
- debug_assert_eq!(word_ptr.addr() % mem::align_of::<usize>(), 0);
+ // While this method is allowed to spuriously fail in CTFE, if it doesn't
+ // have alignment information it should have given a `usize::MAX` for
+ // `align_offset` earlier, sending things through the scalar path instead of
+ // this one, so this check should pass if it's reachable.
+ debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));
// Read subsequent words until the last aligned word, excluding the last
// aligned word itself, which is handled by the tail check later; this keeps
// the tail at most one `usize`, avoiding an extra `byte_pos == len` branch.
while byte_pos < len - USIZE_SIZE {
- debug_assert!(
- // Sanity check that the read is in bounds
- (word_ptr.addr() + USIZE_SIZE) <= start.addr().wrapping_add(len) &&
- // And that our assumptions about `byte_pos` hold.
- (word_ptr.addr() - start.addr()) == byte_pos
- );
+ // Sanity check that the read is in bounds
+ debug_assert!(byte_pos + USIZE_SIZE <= len);
+ // And that our assumptions about `byte_pos` hold.
+ debug_assert!(matches!(
+ word_ptr.cast::<u8>().guaranteed_eq(start.wrapping_add(byte_pos)),
+ // These are from the same allocation, so will hopefully always be
+ // known to match even in CTFE, but if it refuses to compare them
+ // that's ok since it's just a debug check anyway.
+ None | Some(true),
+ ));
// SAFETY: We know `word_ptr` is properly aligned (because of
// `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
diff --git a/library/core/src/slice/index.rs b/library/core/src/slice/index.rs
index 353935324..6ef9f9c95 100644
--- a/library/core/src/slice/index.rs
+++ b/library/core/src/slice/index.rs
@@ -7,10 +7,9 @@ use crate::ops;
use crate::ptr;
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-impl<T, I> const ops::Index<I> for [T]
+impl<T, I> ops::Index<I> for [T]
where
- I: ~const SliceIndex<[T]>,
+ I: SliceIndex<[T]>,
{
type Output = I::Output;
@@ -21,10 +20,9 @@ where
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-impl<T, I> const ops::IndexMut<I> for [T]
+impl<T, I> ops::IndexMut<I> for [T]
where
- I: ~const SliceIndex<[T]>,
+ I: SliceIndex<[T]>,
{
#[inline]
fn index_mut(&mut self, index: I) -> &mut I::Output {
@@ -162,7 +160,6 @@ mod private_slice_index {
message = "the type `{T}` cannot be indexed by `{Self}`",
label = "slice indices are of type `usize` or ranges of `usize`"
)]
-#[const_trait]
pub unsafe trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
/// The output type returned by methods.
#[stable(feature = "slice_get_slice", since = "1.28.0")]
@@ -211,7 +208,7 @@ pub unsafe trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for usize {
+unsafe impl<T> SliceIndex<[T]> for usize {
type Output = T;
#[inline]
@@ -271,7 +268,7 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
/// Because `IndexRange` guarantees `start <= end`, fewer checks are needed here
/// than there are for a general `Range<usize>` (which might be `100..3`).
#[rustc_const_unstable(feature = "const_index_range_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::IndexRange {
+unsafe impl<T> SliceIndex<[T]> for ops::IndexRange {
type Output = [T];
#[inline]
@@ -347,7 +344,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::IndexRange {
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
+unsafe impl<T> SliceIndex<[T]> for ops::Range<usize> {
type Output = [T];
#[inline]
@@ -428,7 +425,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::RangeTo<usize> {
+unsafe impl<T> SliceIndex<[T]> for ops::RangeTo<usize> {
type Output = [T];
#[inline]
@@ -466,7 +463,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeTo<usize> {
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::RangeFrom<usize> {
+unsafe impl<T> SliceIndex<[T]> for ops::RangeFrom<usize> {
type Output = [T];
#[inline]
@@ -512,7 +509,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeFrom<usize> {
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::RangeFull {
+unsafe impl<T> SliceIndex<[T]> for ops::RangeFull {
type Output = [T];
#[inline]
@@ -548,7 +545,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeFull {
#[stable(feature = "inclusive_range", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::RangeInclusive<usize> {
+unsafe impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
type Output = [T];
#[inline]
@@ -592,7 +589,7 @@ unsafe impl<T> const SliceIndex<[T]> for ops::RangeInclusive<usize> {
#[stable(feature = "inclusive_range", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl<T> const SliceIndex<[T]> for ops::RangeToInclusive<usize> {
+unsafe impl<T> SliceIndex<[T]> for ops::RangeToInclusive<usize> {
type Output = [T];
#[inline]
diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index 88b84bd13..5369fe0a9 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -13,7 +13,7 @@ use crate::iter::{
use crate::marker::{PhantomData, Send, Sized, Sync};
use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZeroUsize;
-use crate::ptr::NonNull;
+use crate::ptr::{invalid, invalid_mut, NonNull};
use super::{from_raw_parts, from_raw_parts_mut};
@@ -60,10 +60,15 @@ impl<'a, T> IntoIterator for &'a mut [T] {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct Iter<'a, T: 'a> {
+ /// The pointer to the next element to return, or the past-the-end location
+ /// if the iterator is empty.
+ ///
+ /// This address will be used for all ZST elements, never changed.
ptr: NonNull<T>,
- end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
- // ptr == end is a quick test for the Iterator being empty, that works
- // for both ZST and non-ZST.
+ /// For non-ZSTs, the non-null pointer to the past-the-end element.
+ ///
+ /// For ZSTs, this is `ptr::invalid(len)`.
+ end: *const T,
_marker: PhantomData<&'a T>,
}
@@ -85,10 +90,7 @@ impl<'a, T> Iter<'a, T> {
let ptr = slice.as_ptr();
// SAFETY: Similar to `IterMut::new`.
unsafe {
- assume(!ptr.is_null());
-
- let end =
- if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
+ let end = if T::IS_ZST { invalid(slice.len()) } else { ptr.add(slice.len()) };
Self { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: PhantomData }
}
@@ -179,10 +181,15 @@ impl<T> AsRef<[T]> for Iter<'_, T> {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct IterMut<'a, T: 'a> {
+ /// The pointer to the next element to return, or the past-the-end location
+ /// if the iterator is empty.
+ ///
+ /// This address will be used for all ZST elements, never changed.
ptr: NonNull<T>,
- end: *mut T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
- // ptr == end is a quick test for the Iterator being empty, that works
- // for both ZST and non-ZST.
+ /// For non-ZSTs, the non-null pointer to the past-the-end element.
+ ///
+ /// For ZSTs, this is `ptr::invalid_mut(len)`.
+ end: *mut T,
_marker: PhantomData<&'a mut T>,
}
@@ -219,10 +226,7 @@ impl<'a, T> IterMut<'a, T> {
// See the `next_unchecked!` and `is_empty!` macros as well as the
// `post_inc_start` method for more information.
unsafe {
- assume(!ptr.is_null());
-
- let end =
- if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
+ let end = if T::IS_ZST { invalid_mut(slice.len()) } else { ptr.add(slice.len()) };
Self { ptr: NonNull::new_unchecked(ptr), end, _marker: PhantomData }
}
@@ -685,7 +689,7 @@ where
None
} else {
self.finished = true;
- Some(mem::replace(&mut self.v, &mut []))
+ Some(mem::take(&mut self.v))
}
}
}
@@ -749,7 +753,7 @@ where
match idx_opt {
None => self.finish(),
Some(idx) => {
- let tmp = mem::replace(&mut self.v, &mut []);
+ let tmp = mem::take(&mut self.v);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(&mut tail[1..])
@@ -830,7 +834,7 @@ where
if idx == self.v.len() {
self.finished = true;
}
- let tmp = mem::replace(&mut self.v, &mut []);
+ let tmp = mem::take(&mut self.v);
let (head, tail) = tmp.split_at_mut(idx);
self.v = tail;
Some(head)
@@ -876,7 +880,7 @@ where
if idx == 0 {
self.finished = true;
}
- let tmp = mem::replace(&mut self.v, &mut []);
+ let tmp = mem::take(&mut self.v);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(tail)
diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs
index 392752f2a..3462c0e02 100644
--- a/library/core/src/slice/iter/macros.rs
+++ b/library/core/src/slice/iter/macros.rs
@@ -1,11 +1,30 @@
//! Macros used by iterators of slice.
+// Shrinks the iterator when T is a ZST, setting the length to `new_len`.
+// `new_len` must not exceed `self.len()`.
+macro_rules! zst_set_len {
+ ($self: ident, $new_len: expr) => {{
+ #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+
+ // SAFETY: same as `invalid(_mut)`, but the macro doesn't know
+ // which versions of that function to call, so open-code it.
+ $self.end = unsafe { mem::transmute::<usize, _>($new_len) };
+ }};
+}
+
+// Shrinks the iterator when T is a ZST, reducing the length by `n`.
+// `n` must not exceed `self.len()`.
+macro_rules! zst_shrink {
+ ($self: ident, $n: ident) => {
+ let new_len = $self.end.addr() - $n;
+ zst_set_len!($self, new_len);
+ };
+}
+
// Inlining is_empty and len makes a huge performance difference
macro_rules! is_empty {
- // The way we encode the length of a ZST iterator, this works both for ZST
- // and non-ZST.
($self: ident) => {
- $self.ptr.as_ptr() as *const T == $self.end
+ if T::IS_ZST { $self.end.addr() == 0 } else { $self.ptr.as_ptr() as *const _ == $self.end }
};
}
@@ -13,16 +32,13 @@ macro_rules! len {
($self: ident) => {{
#![allow(unused_unsafe)] // we're sometimes used within an unsafe block
- let start = $self.ptr;
if T::IS_ZST {
- // This _cannot_ use `ptr_sub` because we depend on wrapping
- // to represent the length of long ZST slice iterators.
- $self.end.addr().wrapping_sub(start.as_ptr().addr())
+ $self.end.addr()
} else {
// To get rid of some bounds checks (see `position`), we use ptr_sub instead of
// offset_from (Tested by `codegen/slice-position-bounds-check`.)
// SAFETY: by the type invariant pointers are aligned and `start <= end`
- unsafe { $self.end.sub_ptr(start.as_ptr()) }
+ unsafe { $self.end.sub_ptr($self.ptr.as_ptr()) }
}
}};
}
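// A standalone sketch of the new ZST length encoding: the remaining length
// lives in the address bits of a dangling `end` pointer built with
// `ptr::invalid(len)`, so the length is just `end.addr()` and "empty" is
// `end.addr() == 0`. This mirrors the macros above using the same unstable
// strict-provenance APIs; it is an illustration, not the iterator itself.
#![feature(strict_provenance)]
use core::ptr;

struct ZstIterSketch {
    end: *const (), // address doubles as the number of elements left
}

impl ZstIterSketch {
    fn new(len: usize) -> Self {
        Self { end: ptr::invalid(len) }
    }
    fn len(&self) -> usize {
        self.end.addr()
    }
    fn next(&mut self) -> Option<()> {
        let remaining = self.len().checked_sub(1)?;
        self.end = ptr::invalid(remaining);
        Some(())
    }
}

fn main() {
    let mut it = ZstIterSketch::new(3);
    assert_eq!(it.len(), 3);
    assert_eq!(core::iter::from_fn(|| it.next()).count(), 3);
    assert_eq!(it.len(), 0);
}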
@@ -50,14 +66,6 @@ macro_rules! iterator {
($self: ident) => {& $( $mut_ )? *$self.pre_dec_end(1)}
}
- // Shrinks the iterator when T is a ZST, by moving the end of the iterator
- // backwards by `n`. `n` must not exceed `self.len()`.
- macro_rules! zst_shrink {
- ($self: ident, $n: ident) => {
- $self.end = $self.end.wrapping_byte_sub($n);
- }
- }
-
impl<'a, T> $name<'a, T> {
// Helper function for creating a slice from the iterator.
#[inline(always)]
@@ -73,16 +81,15 @@ macro_rules! iterator {
// Unsafe because the offset must not exceed `self.len()`.
#[inline(always)]
unsafe fn post_inc_start(&mut self, offset: usize) -> * $raw_mut T {
- if mem::size_of::<T>() == 0 {
+ let old = self.ptr;
+ if T::IS_ZST {
zst_shrink!(self, offset);
- self.ptr.as_ptr()
} else {
- let old = self.ptr.as_ptr();
// SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
// so this new pointer is inside `self` and thus guaranteed to be non-null.
- self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().add(offset)) };
- old
+ self.ptr = unsafe { self.ptr.add(offset) };
}
+ old.as_ptr()
}
// Helper function for moving the end of the iterator backwards by `offset` elements,
@@ -124,12 +131,10 @@ macro_rules! iterator {
fn next(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
- // SAFETY: `assume` calls are safe since a slice's start pointer
- // must be non-null, and slices over non-ZSTs must also have a
- // non-null end pointer. The call to `next_unchecked!` is safe
- // since we check if the iterator is empty first.
+ // SAFETY: `assume` call is safe because slices over non-ZSTs must
+ // have a non-null end pointer. The call to `next_unchecked!` is
+ // safe since we check if the iterator is empty first.
unsafe {
- assume(!self.ptr.as_ptr().is_null());
if !<T>::IS_ZST {
assume(!self.end.is_null());
}
@@ -157,9 +162,7 @@ macro_rules! iterator {
if n >= len!(self) {
// This iterator is now empty.
if T::IS_ZST {
- // We have to do it this way as `ptr` may never be 0, but `end`
- // could be (due to wrapping).
- self.end = self.ptr.as_ptr();
+ zst_set_len!(self, 0);
} else {
// SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
unsafe {
@@ -339,12 +342,10 @@ macro_rules! iterator {
fn next_back(&mut self) -> Option<$elem> {
// could be implemented with slices, but this avoids bounds checks
- // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null,
- // and slices over non-ZSTs must also have a non-null end pointer.
- // The call to `next_back_unchecked!` is safe since we check if the iterator is
- // empty first.
+ // SAFETY: `assume` call is safe because slices over non-ZSTs must
+ // have a non-null end pointer. The call to `next_back_unchecked!`
+ // is safe since we check if the iterator is empty first.
unsafe {
- assume(!self.ptr.as_ptr().is_null());
if !<T>::IS_ZST {
assume(!self.end.is_null());
}
@@ -360,7 +361,11 @@ macro_rules! iterator {
fn nth_back(&mut self, n: usize) -> Option<$elem> {
if n >= len!(self) {
// This iterator is now empty.
- self.end = self.ptr.as_ptr();
+ if T::IS_ZST {
+ zst_set_len!(self, 0);
+ } else {
+ self.end = self.ptr.as_ptr();
+ }
return None;
}
// SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
diff --git a/library/core/src/slice/memchr.rs b/library/core/src/slice/memchr.rs
index 98c8349eb..3a8b59d72 100644
--- a/library/core/src/slice/memchr.rs
+++ b/library/core/src/slice/memchr.rs
@@ -1,7 +1,6 @@
// Original implementation taken from rust-memchr.
// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch
-use crate::cmp;
use crate::mem;
const LO_USIZE: usize = usize::repeat_u8(0x01);
@@ -83,8 +82,12 @@ const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
let mut offset = ptr.align_offset(USIZE_BYTES);
if offset > 0 {
- offset = cmp::min(offset, len);
- if let Some(index) = memchr_naive(x, &text[..offset]) {
+ // FIXME(const-hack, fee1-dead): replace with min
+ offset = if offset < len { offset } else { len };
+ // FIXME(const-hack, fee1-dead): replace with range slicing
+ // SAFETY: offset is within bounds
+ let slice = unsafe { super::from_raw_parts(text.as_ptr(), offset) };
+ if let Some(index) = memchr_naive(x, slice) {
return Some(index);
}
}
@@ -110,7 +113,10 @@ const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
// Find the byte after the point the body loop stopped.
// FIXME(const-hack): Use `?` instead.
- if let Some(i) = memchr_naive(x, &text[offset..]) { Some(offset + i) } else { None }
+ // FIXME(const-hack, fee1-dead): use range slicing
+ // SAFETY: offset is within bounds
+ let slice = unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
+ if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
}
/// Returns the last index matching the byte `x` in `text`.
diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs
index f541808a6..ea0181e35 100644
--- a/library/core/src/slice/mod.rs
+++ b/library/core/src/slice/mod.rs
@@ -42,8 +42,13 @@ mod index;
mod iter;
mod raw;
mod rotate;
+mod select;
mod specialize;
+#[unstable(feature = "str_internals", issue = "none")]
+#[doc(hidden)]
+pub use ascii::is_ascii_simple;
+
#[stable(feature = "rust1", since = "1.0.0")]
pub use iter::{Chunks, ChunksMut, Windows};
#[stable(feature = "rust1", since = "1.0.0")]
@@ -315,6 +320,264 @@ impl<T> [T] {
if let [.., last] = self { Some(last) } else { None }
}
+ /// Returns the first `N` elements of the slice, or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let u = [10, 40, 30];
+ /// assert_eq!(Some(&[10, 40]), u.first_chunk::<2>());
+ ///
+ /// let v: &[i32] = &[10];
+ /// assert_eq!(None, v.first_chunk::<2>());
+ ///
+ /// let w: &[i32] = &[];
+ /// assert_eq!(Some(&[]), w.first_chunk::<0>());
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn first_chunk<const N: usize>(&self) -> Option<&[T; N]> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We explicitly check for the correct number of elements,
+ // and do not let the reference outlive the slice.
+ Some(unsafe { &*(self.as_ptr() as *const [T; N]) })
+ }
+ }
+
+ /// Returns a mutable reference to the first `N` elements of the slice,
+ /// or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let x = &mut [0, 1, 2];
+ ///
+ /// if let Some(first) = x.first_chunk_mut::<2>() {
+ /// first[0] = 5;
+ /// first[1] = 4;
+ /// }
+ /// assert_eq!(x, &[5, 4, 2]);
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn first_chunk_mut<const N: usize>(&mut self) -> Option<&mut [T; N]> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We explicitly check for the correct number of elements,
+ // do not let the reference outlive the slice,
+ // and require exclusive access to the entire slice to mutate the chunk.
+ Some(unsafe { &mut *(self.as_mut_ptr() as *mut [T; N]) })
+ }
+ }
+
+ /// Returns the first `N` elements of the slice and the remainder,
+ /// or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let x = &[0, 1, 2];
+ ///
+ /// if let Some((first, elements)) = x.split_first_chunk::<2>() {
+ /// assert_eq!(first, &[0, 1]);
+ /// assert_eq!(elements, &[2]);
+ /// }
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn split_first_chunk<const N: usize>(&self) -> Option<(&[T; N], &[T])> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We manually verified the bounds of the split.
+ let (first, tail) = unsafe { self.split_at_unchecked(N) };
+
+ // SAFETY: We explicitly check for the correct number of elements,
+ // and do not let the references outlive the slice.
+ Some((unsafe { &*(first.as_ptr() as *const [T; N]) }, tail))
+ }
+ }
+
+ /// Returns a mutable reference to the first `N` elements of the slice and the remainder,
+ /// or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let x = &mut [0, 1, 2];
+ ///
+ /// if let Some((first, elements)) = x.split_first_chunk_mut::<2>() {
+ /// first[0] = 3;
+ /// first[1] = 4;
+ /// elements[0] = 5;
+ /// }
+ /// assert_eq!(x, &[3, 4, 5]);
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn split_first_chunk_mut<const N: usize>(
+ &mut self,
+ ) -> Option<(&mut [T; N], &mut [T])> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We manually verified the bounds of the split.
+ let (first, tail) = unsafe { self.split_at_mut_unchecked(N) };
+
+ // SAFETY: We explicitly check for the correct number of elements,
+ // do not let the reference outlive the slice,
+ // and enforce exclusive mutability of the chunk by the split.
+ Some((unsafe { &mut *(first.as_mut_ptr() as *mut [T; N]) }, tail))
+ }
+ }
+
+ /// Returns the last `N` elements of the slice and the remainder,
+ /// or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let x = &[0, 1, 2];
+ ///
+ /// if let Some((last, elements)) = x.split_last_chunk::<2>() {
+ /// assert_eq!(last, &[1, 2]);
+ /// assert_eq!(elements, &[0]);
+ /// }
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn split_last_chunk<const N: usize>(&self) -> Option<(&[T; N], &[T])> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We manually verified the bounds of the split.
+ let (init, last) = unsafe { self.split_at_unchecked(self.len() - N) };
+
+ // SAFETY: We explicitly check for the correct number of elements,
+ // and do not let the references outlive the slice.
+ Some((unsafe { &*(last.as_ptr() as *const [T; N]) }, init))
+ }
+ }
+
+ /// Returns a mutable reference to the last `N` elements of the slice and the remainder,
+ /// or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let x = &mut [0, 1, 2];
+ ///
+ /// if let Some((last, elements)) = x.split_last_chunk_mut::<2>() {
+ /// last[0] = 3;
+ /// last[1] = 4;
+ /// elements[0] = 5;
+ /// }
+ /// assert_eq!(x, &[5, 3, 4]);
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn split_last_chunk_mut<const N: usize>(
+ &mut self,
+ ) -> Option<(&mut [T; N], &mut [T])> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We manually verified the bounds of the split.
+ let (init, last) = unsafe { self.split_at_mut_unchecked(self.len() - N) };
+
+ // SAFETY: We explicitly check for the correct number of elements,
+ // do not let the reference outlive the slice,
+ // and enforce exclusive mutability of the chunk by the split.
+ Some((unsafe { &mut *(last.as_mut_ptr() as *mut [T; N]) }, init))
+ }
+ }
+
+ /// Returns the last `N` elements of the slice, or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let u = [10, 40, 30];
+ /// assert_eq!(Some(&[40, 30]), u.last_chunk::<2>());
+ ///
+ /// let v: &[i32] = &[10];
+ /// assert_eq!(None, v.last_chunk::<2>());
+ ///
+ /// let w: &[i32] = &[];
+ /// assert_eq!(Some(&[]), w.last_chunk::<0>());
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn last_chunk<const N: usize>(&self) -> Option<&[T; N]> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We manually verified the bounds of the slice.
+ // FIXME: Without const traits, we need this instead of `get_unchecked`.
+ let last = unsafe { self.split_at_unchecked(self.len() - N).1 };
+
+ // SAFETY: We explicitly check for the correct number of elements,
+ // and do not let the references outlive the slice.
+ Some(unsafe { &*(last.as_ptr() as *const [T; N]) })
+ }
+ }
+
+ /// Returns a mutable reference to the last `N` elements of the slice,
+ /// or `None` if it has fewer than `N` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_first_last_chunk)]
+ ///
+ /// let x = &mut [0, 1, 2];
+ ///
+ /// if let Some(last) = x.last_chunk_mut::<2>() {
+ /// last[0] = 10;
+ /// last[1] = 20;
+ /// }
+ /// assert_eq!(x, &[0, 10, 20]);
+ /// ```
+ #[unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[rustc_const_unstable(feature = "slice_first_last_chunk", issue = "111774")]
+ #[inline]
+ pub const fn last_chunk_mut<const N: usize>(&mut self) -> Option<&mut [T; N]> {
+ if self.len() < N {
+ None
+ } else {
+ // SAFETY: We manually verified the bounds of the slice.
+ // FIXME: Without const traits, we need this instead of `get_unchecked`.
+ let last = unsafe { self.split_at_mut_unchecked(self.len() - N).1 };
+
+ // SAFETY: We explicitly check for the correct number of elements,
+ // do not let the reference outlive the slice,
+ // and require exclusive access to the entire slice to mutate the chunk.
+ Some(unsafe { &mut *(last.as_mut_ptr() as *mut [T; N]) })
+ }
+ }
+
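// A short usage sketch for the `slice_first_last_chunk` family added above.
// Everything here is gated behind that unstable feature (tracking issue
// 111774), exactly as in the doc examples, so it needs a nightly toolchain.
#![feature(slice_first_last_chunk)]

fn main() {
    let v = [1u8, 2, 3, 4, 5];

    // Fixed-size views into either end of the slice, with `None` instead of a
    // panicking path when the slice is too short.
    let first: &[u8; 2] = v.first_chunk::<2>().unwrap();
    let last: &[u8; 2] = v.last_chunk::<2>().unwrap();
    assert_eq!((first, last), (&[1, 2], &[4, 5]));
    assert!(v.first_chunk::<6>().is_none());

    // The `split_*` variants also hand back the remainder.
    let (head, rest) = v.split_first_chunk::<3>().unwrap();
    assert_eq!(head, &[1, 2, 3]);
    assert_eq!(rest, &[4, 5]);
}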
/// Returns a reference to an element or subslice depending on the type of
/// index.
///
@@ -333,12 +596,11 @@ impl<T> [T] {
/// assert_eq!(None, v.get(0..4));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
#[must_use]
- pub const fn get<I>(&self, index: I) -> Option<&I::Output>
+ pub fn get<I>(&self, index: I) -> Option<&I::Output>
where
- I: ~const SliceIndex<Self>,
+ I: SliceIndex<Self>,
{
index.get(self)
}
@@ -359,12 +621,11 @@ impl<T> [T] {
/// assert_eq!(x, &[0, 42, 2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
#[must_use]
- pub const fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
+ pub fn get_mut<I>(&mut self, index: I) -> Option<&mut I::Output>
where
- I: ~const SliceIndex<Self>,
+ I: SliceIndex<Self>,
{
index.get_mut(self)
}
@@ -392,12 +653,11 @@ impl<T> [T] {
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
#[must_use]
- pub const unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
+ pub unsafe fn get_unchecked<I>(&self, index: I) -> &I::Output
where
- I: ~const SliceIndex<Self>,
+ I: SliceIndex<Self>,
{
// SAFETY: the caller must uphold most of the safety requirements for `get_unchecked`;
// the slice is dereferenceable because `self` is a safe reference.
@@ -430,12 +690,11 @@ impl<T> [T] {
/// assert_eq!(x, &[1, 13, 4]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
#[must_use]
- pub const unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
+ pub unsafe fn get_unchecked_mut<I>(&mut self, index: I) -> &mut I::Output
where
- I: ~const SliceIndex<Self>,
+ I: SliceIndex<Self>,
{
// SAFETY: the caller must uphold the safety requirements for `get_unchecked_mut`;
// the slice is dereferenceable because `self` is a safe reference.
@@ -678,9 +937,8 @@ impl<T> [T] {
/// assert!(v == [3, 2, 1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_reverse", issue = "100784")]
#[inline]
- pub const fn reverse(&mut self) {
+ pub fn reverse(&mut self) {
let half_len = self.len() / 2;
let Range { start, end } = self.as_mut_ptr_range();
@@ -703,7 +961,7 @@ impl<T> [T] {
revswap(front_half, back_half, half_len);
#[inline]
- const fn revswap<T>(a: &mut [T], b: &mut [T], n: usize) {
+ fn revswap<T>(a: &mut [T], b: &mut [T], n: usize) {
debug_assert!(a.len() == n);
debug_assert!(b.len() == n);
@@ -996,7 +1254,7 @@ impl<T> [T] {
#[unstable(feature = "slice_as_chunks", issue = "74985")]
#[inline]
#[must_use]
- pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
+ pub const unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
let this = self;
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
let new_len = unsafe {
@@ -1044,7 +1302,7 @@ impl<T> [T] {
#[inline]
#[track_caller]
#[must_use]
- pub fn as_chunks<const N: usize>(&self) -> (&[[T; N]], &[T]) {
+ pub const fn as_chunks<const N: usize>(&self) -> (&[[T; N]], &[T]) {
assert!(N != 0, "chunk size must be non-zero");
let len = self.len() / N;
let (multiple_of_n, remainder) = self.split_at(len * N);
@@ -1076,7 +1334,7 @@ impl<T> [T] {
#[inline]
#[track_caller]
#[must_use]
- pub fn as_rchunks<const N: usize>(&self) -> (&[T], &[[T; N]]) {
+ pub const fn as_rchunks<const N: usize>(&self) -> (&[T], &[[T; N]]) {
assert!(N != 0, "chunk size must be non-zero");
let len = self.len() / N;
let (remainder, multiple_of_n) = self.split_at(self.len() - len * N);
@@ -1153,7 +1411,7 @@ impl<T> [T] {
#[unstable(feature = "slice_as_chunks", issue = "74985")]
#[inline]
#[must_use]
- pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
+ pub const unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
let this = &*self;
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
let new_len = unsafe {
@@ -1196,7 +1454,7 @@ impl<T> [T] {
#[inline]
#[track_caller]
#[must_use]
- pub fn as_chunks_mut<const N: usize>(&mut self) -> (&mut [[T; N]], &mut [T]) {
+ pub const fn as_chunks_mut<const N: usize>(&mut self) -> (&mut [[T; N]], &mut [T]) {
assert!(N != 0, "chunk size must be non-zero");
let len = self.len() / N;
let (multiple_of_n, remainder) = self.split_at_mut(len * N);
@@ -1234,7 +1492,7 @@ impl<T> [T] {
#[inline]
#[track_caller]
#[must_use]
- pub fn as_rchunks_mut<const N: usize>(&mut self) -> (&mut [T], &mut [[T; N]]) {
+ pub const fn as_rchunks_mut<const N: usize>(&mut self) -> (&mut [T], &mut [[T; N]]) {
assert!(N != 0, "chunk size must be non-zero");
let len = self.len() / N;
let (remainder, multiple_of_n) = self.split_at_mut(self.len() - len * N);
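// The `as_chunks*` methods above only gain `const`; their behavior is
// unchanged. A usage sketch of that existing behavior (the methods are still
// behind the unstable `slice_as_chunks` feature, tracking issue 74985):
#![feature(slice_as_chunks)]

fn main() {
    let bytes = [1u8, 2, 3, 4, 5, 6, 7];

    // Leftover elements end up in the remainder on the right...
    let (chunks, remainder) = bytes.as_chunks::<3>();
    assert_eq!(chunks, &[[1, 2, 3], [4, 5, 6]]);
    assert_eq!(remainder, &[7]);

    // ...or on the left for the reversed variant.
    let (remainder, rchunks) = bytes.as_rchunks::<3>();
    assert_eq!(remainder, &[1]);
    assert_eq!(rchunks, &[[2, 3, 4], [5, 6, 7]]);
}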
@@ -1596,7 +1854,8 @@ impl<T> [T] {
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_slice_split_at_not_mut", issue = "101158")]
+ #[rustc_const_stable(feature = "const_slice_split_at_not_mut", since = "1.71.0")]
+ #[rustc_allow_const_fn_unstable(slice_split_at_unchecked)]
#[inline]
#[track_caller]
#[must_use]
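// With the stabilization above, `split_at` on shared slices is callable in
// const contexts from 1.71.0 onward. A minimal illustration:
const DATA: &[u8] = &[1, 2, 3, 4];
const SPLIT: (&[u8], &[u8]) = DATA.split_at(2);

fn main() {
    assert_eq!(SPLIT.0, &[1, 2]);
    assert_eq!(SPLIT.1, &[3, 4]);
}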
@@ -2746,8 +3005,9 @@ impl<T> [T] {
///
/// # Current implementation
///
- /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
- /// used for [`sort_unstable`].
+ /// The current algorithm is an introselect implementation based on Pattern Defeating Quicksort, which is also
+ /// the basis for [`sort_unstable`]. The fallback algorithm is Median of Medians using Tukey's Ninther for
+ /// pivot selection, which guarantees linear runtime for all inputs.
///
/// [`sort_unstable`]: slice::sort_unstable
///
@@ -2776,7 +3036,7 @@ impl<T> [T] {
where
T: Ord,
{
- sort::partition_at_index(self, index, T::lt)
+ select::partition_at_index(self, index, T::lt)
}
/// Reorder the slice with a comparator function such that the element at `index` is at its
@@ -2797,8 +3057,9 @@ impl<T> [T] {
///
/// # Current implementation
///
- /// The current algorithm is based on the quickselect portion of the same quicksort algorithm
- /// used for [`sort_unstable`].
+ /// The current algorithm is an introselect implementation based on Pattern Defeating Quicksort,
+ /// which is also the basis for [`sort_unstable`]. The fallback algorithm is Median of Medians
+ /// using Tukey's Ninther for pivot selection, which guarantees linear runtime for all inputs.
///
/// [`sort_unstable`]: slice::sort_unstable
///
@@ -2831,7 +3092,7 @@ impl<T> [T] {
where
F: FnMut(&T, &T) -> Ordering,
{
- sort::partition_at_index(self, index, |a: &T, b: &T| compare(a, b) == Less)
+ select::partition_at_index(self, index, |a: &T, b: &T| compare(a, b) == Less)
}
/// Reorder the slice with a key extraction function such that the element at `index` is at its
@@ -2887,7 +3148,7 @@ impl<T> [T] {
F: FnMut(&T) -> K,
K: Ord,
{
- sort::partition_at_index(self, index, |a: &T, b: &T| f(a).lt(&f(b)))
+ select::partition_at_index(self, index, |a: &T, b: &T| f(a).lt(&f(b)))
}
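The `_by` and `_by_key` variants are routed the same way; a minimal illustrative sketch of the key-based form:

fn main() {
    let mut v = [(3u32, 'c'), (1, 'a'), (2, 'b')];
    // Select by the numeric key: the entry whose key has sorted rank 1 lands at index 1.
    let (_, mid, _) = v.select_nth_unstable_by_key(1, |&(k, _)| k);
    assert_eq!(*mid, (2, 'b'));
}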
/// Moves all consecutive repeated elements to the end of the slice according to the
@@ -3479,44 +3740,13 @@ impl<T> [T] {
// Ts = size_of::<U> / gcd(size_of::<T>, size_of::<U>)
//
// Luckily since all this is constant-evaluated... performance here matters not!
- #[inline]
- fn gcd(a: usize, b: usize) -> usize {
- use crate::intrinsics;
- // iterative stein’s algorithm
- // We should still make this `const fn` (and revert to recursive algorithm if we do)
- // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
-
- // SAFETY: `a` and `b` are checked to be non-zero values.
- let (ctz_a, mut ctz_b) = unsafe {
- if a == 0 {
- return b;
- }
- if b == 0 {
- return a;
- }
- (intrinsics::cttz_nonzero(a), intrinsics::cttz_nonzero(b))
- };
- let k = ctz_a.min(ctz_b);
- let mut a = a >> ctz_a;
- let mut b = b;
- loop {
- // remove all factors of 2 from b
- b >>= ctz_b;
- if a > b {
- mem::swap(&mut a, &mut b);
- }
- b = b - a;
- // SAFETY: `b` is checked to be non-zero.
- unsafe {
- if b == 0 {
- break;
- }
- ctz_b = intrinsics::cttz_nonzero(b);
- }
- }
- a << k
+ const fn gcd(a: usize, b: usize) -> usize {
+ if b == 0 { a } else { gcd(b, a % b) }
}
- let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
+
+ // Explicitly wrap the function call in a const block so it gets
+ // constant-evaluated even in debug mode.
+ let gcd: usize = const { gcd(mem::size_of::<T>(), mem::size_of::<U>()) };
let ts: usize = mem::size_of::<U>() / gcd;
let us: usize = mem::size_of::<T>() / gcd;
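The iterative Stein's algorithm is replaced by a recursive Euclid that can be a `const fn`, so the ratio used by `align_to` is now fixed at compile time even in debug builds. A standalone sketch of the same recursion (illustrative, not the std internals):

const fn gcd(a: usize, b: usize) -> usize {
    // Classic Euclid: gcd(a, 0) = a, otherwise recurse on (b, a mod b).
    if b == 0 { a } else { gcd(b, a % b) }
}

fn main() {
    // Mirrors the `ts`/`us` computation above for T = u16, U = u32:
    // gcd(2, 4) = 2, so 2 `u16`s cover exactly the bytes of 1 `u32`.
    const G: usize = gcd(core::mem::size_of::<u16>(), core::mem::size_of::<u32>());
    assert_eq!(G, 2);
    assert_eq!(core::mem::size_of::<u32>() / G, 2); // ts
    assert_eq!(core::mem::size_of::<u16>() / G, 1); // us
}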
@@ -4262,7 +4492,7 @@ impl<T, const N: usize> [[T; N]] {
/// assert!(empty_slice_of_arrays.flatten().is_empty());
/// ```
#[unstable(feature = "slice_flatten", issue = "95629")]
- pub fn flatten(&self) -> &[T] {
+ pub const fn flatten(&self) -> &[T] {
let len = if T::IS_ZST {
self.len().checked_mul(N).expect("slice len overflow")
} else {
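Making `flatten` const does not change how it is used at runtime; for context, a sketch assuming a nightly toolchain with the unstable `slice_flatten` feature (issue 95629 above):

#![feature(slice_flatten)]

fn main() {
    let grid = [[1, 2, 3], [4, 5, 6]];
    // A slice of arrays flattens into one contiguous slice without copying.
    let flat: &[i32] = grid.as_slice().flatten();
    assert_eq!(flat, &[1, 2, 3, 4, 5, 6]);
}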
@@ -4404,8 +4634,7 @@ where
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<T> const Default for &[T] {
+impl<T> Default for &[T] {
/// Creates an empty slice.
fn default() -> Self {
&[]
@@ -4413,8 +4642,7 @@ impl<T> const Default for &[T] {
}
#[stable(feature = "mut_slice_default", since = "1.5.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<T> const Default for &mut [T] {
+impl<T> Default for &mut [T] {
/// Creates a mutable empty slice.
fn default() -> Self {
&mut []
@@ -4458,7 +4686,7 @@ impl<T, const N: usize> SlicePattern for [T; N] {
/// This will do `binomial(N + 1, 2) = N * (N + 1) / 2 = 0, 1, 3, 6, 10, ..`
/// comparison operations.
fn get_many_check_valid<const N: usize>(indices: &[usize; N], len: usize) -> bool {
- // NB: The optimzer should inline the loops into a sequence
+ // NB: The optimizer should inline the loops into a sequence
// of instructions without additional branching.
let mut valid = true;
for (i, &idx) in indices.iter().enumerate() {
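An illustrative standalone version of the check described above, with assumed names (not the std internals): every index must be in bounds and pairwise distinct, which costs N bounds checks plus N*(N-1)/2 comparisons, i.e. binomial(N+1, 2) in total.

fn check_valid<const N: usize>(indices: &[usize; N], len: usize) -> bool {
    let mut valid = true;
    for (i, &idx) in indices.iter().enumerate() {
        valid &= idx < len; // in bounds
        for &prev in &indices[..i] {
            valid &= idx != prev; // distinct from every earlier index
        }
    }
    valid
}

fn main() {
    assert!(check_valid(&[0, 2, 4], 5));
    assert!(!check_valid(&[1, 1], 5)); // duplicate index
    assert!(!check_valid(&[5], 5)); // out of bounds
}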
diff --git a/library/core/src/slice/select.rs b/library/core/src/slice/select.rs
new file mode 100644
index 000000000..ffc193578
--- /dev/null
+++ b/library/core/src/slice/select.rs
@@ -0,0 +1,302 @@
+//! Slice selection
+//!
+//! This module contains the implementation for `slice::select_nth_unstable`.
+//! It uses an introselect algorithm based on Orson Peters' pattern-defeating quicksort,
+//! published at: <https://github.com/orlp/pdqsort>
+//!
+//! The fallback algorithm used for introselect is Median of Medians using Tukey's Ninther
+//! for pivot selection. Using this as a fallback ensures O(n) worst case running time with
+//! better performance than one would get using heapsort as fallback.
+
+use crate::cmp;
+use crate::mem::{self, SizedTypeProperties};
+use crate::slice::sort::{
+ break_patterns, choose_pivot, insertion_sort_shift_left, partition, partition_equal,
+};
+
+// For slices of up to this length it's probably faster to simply sort them.
+// Defined at the module scope because it's used in multiple functions.
+const MAX_INSERTION: usize = 10;
+
+fn partition_at_index_loop<'a, T, F>(
+ mut v: &'a mut [T],
+ mut index: usize,
+ is_less: &mut F,
+ mut pred: Option<&'a T>,
+) where
+ F: FnMut(&T, &T) -> bool,
+{
+ // Limit the amount of iterations and fall back to fast deterministic selection
+ // to ensure O(n) worst case running time. This limit needs to be constant, because
+ // using `ilog2(len)` like in `sort` would result in O(n log n) time complexity.
+ // The exact value of the limit is chosen somewhat arbitrarily, but for most inputs bad pivot
+ // selections should be relatively rare, so the limit usually shouldn't be reached
+ // anyway.
+ let mut limit = 16;
+
+ // True if the last partitioning was reasonably balanced.
+ let mut was_balanced = true;
+
+ loop {
+ if v.len() <= MAX_INSERTION {
+ if v.len() > 1 {
+ insertion_sort_shift_left(v, 1, is_less);
+ }
+ return;
+ }
+
+ if limit == 0 {
+ median_of_medians(v, is_less, index);
+ return;
+ }
+
+ // If the last partitioning was imbalanced, try breaking patterns in the slice by shuffling
+ // some elements around. Hopefully we'll choose a better pivot this time.
+ if !was_balanced {
+ break_patterns(v);
+ limit -= 1;
+ }
+
+ // Choose a pivot
+ let (pivot, _) = choose_pivot(v, is_less);
+
+ // If the chosen pivot is equal to the predecessor, then it's the smallest element in the
+ // slice. Partition the slice into elements equal to and elements greater than the pivot.
+ // This case is usually hit when the slice contains many duplicate elements.
+ if let Some(p) = pred {
+ if !is_less(p, &v[pivot]) {
+ let mid = partition_equal(v, pivot, is_less);
+
+ // If we've passed our index, then we're good.
+ if mid > index {
+ return;
+ }
+
+ // Otherwise, continue sorting elements greater than the pivot.
+ v = &mut v[mid..];
+ index = index - mid;
+ pred = None;
+ continue;
+ }
+ }
+
+ let (mid, _) = partition(v, pivot, is_less);
+ was_balanced = cmp::min(mid, v.len() - mid) >= v.len() / 8;
+
+ // Split the slice into `left`, `pivot`, and `right`.
+ let (left, right) = v.split_at_mut(mid);
+ let (pivot, right) = right.split_at_mut(1);
+ let pivot = &pivot[0];
+
+ if mid < index {
+ v = right;
+ index = index - mid - 1;
+ pred = Some(pivot);
+ } else if mid > index {
+ v = left;
+ } else {
+ // If mid == index, then we're done, since partition() guaranteed that all elements
+ // after mid are greater than or equal to mid.
+ return;
+ }
+ }
+}
+
+/// Helper function that returns the index of the minimum element in the slice using the given
+/// comparator function
+fn min_index<T, F: FnMut(&T, &T) -> bool>(slice: &[T], is_less: &mut F) -> Option<usize> {
+ slice
+ .iter()
+ .enumerate()
+ .reduce(|acc, t| if is_less(t.1, acc.1) { t } else { acc })
+ .map(|(i, _)| i)
+}
+
+/// Helper function that returns the index of the maximum element in the slice using the given
+/// comparator function
+fn max_index<T, F: FnMut(&T, &T) -> bool>(slice: &[T], is_less: &mut F) -> Option<usize> {
+ slice
+ .iter()
+ .enumerate()
+ .reduce(|acc, t| if is_less(acc.1, t.1) { t } else { acc })
+ .map(|(i, _)| i)
+}
+
+/// Reorder the slice such that the element at `index` is at its final sorted position.
+pub fn partition_at_index<T, F>(
+ v: &mut [T],
+ index: usize,
+ mut is_less: F,
+) -> (&mut [T], &mut T, &mut [T])
+where
+ F: FnMut(&T, &T) -> bool,
+{
+ if index >= v.len() {
+ panic!("partition_at_index index {} greater than length of slice {}", index, v.len());
+ }
+
+ if T::IS_ZST {
+ // Sorting has no meaningful behavior on zero-sized types. Do nothing.
+ } else if index == v.len() - 1 {
+ // Find max element and place it in the last position of the array. We're free to use
+ // `unwrap()` here because we know v must not be empty.
+ let max_idx = max_index(v, &mut is_less).unwrap();
+ v.swap(max_idx, index);
+ } else if index == 0 {
+ // Find min element and place it in the first position of the array. We're free to use
+ // `unwrap()` here because we know v must not be empty.
+ let min_idx = min_index(v, &mut is_less).unwrap();
+ v.swap(min_idx, index);
+ } else {
+ partition_at_index_loop(v, index, &mut is_less, None);
+ }
+
+ let (left, right) = v.split_at_mut(index);
+ let (pivot, right) = right.split_at_mut(1);
+ let pivot = &mut pivot[0];
+ (left, pivot, right)
+}
+
+/// Selection algorithm to select the k-th element from the slice in guaranteed O(n) time.
+/// This is essentially a quickselect that uses Tukey's Ninther for pivot selection
+fn median_of_medians<T, F: FnMut(&T, &T) -> bool>(mut v: &mut [T], is_less: &mut F, mut k: usize) {
+ // Since this function isn't public, it should never be called with an out-of-bounds index.
+ debug_assert!(k < v.len());
+
+ // If T is a ZST, `partition_at_index` will already return early.
+ debug_assert!(!T::IS_ZST);
+
+ // We now know that `k < v.len() <= isize::MAX`
+ loop {
+ if v.len() <= MAX_INSERTION {
+ if v.len() > 1 {
+ insertion_sort_shift_left(v, 1, is_less);
+ }
+ return;
+ }
+
+ // `median_of_{minima,maxima}` can't handle the extreme cases of the first/last element,
+ // so we catch them here and just do a linear search.
+ if k == v.len() - 1 {
+ // Find max element and place it in the last position of the array. We're free to use
+ // `unwrap()` here because we know v must not be empty.
+ let max_idx = max_index(v, is_less).unwrap();
+ v.swap(max_idx, k);
+ return;
+ } else if k == 0 {
+ // Find min element and place it in the first position of the array. We're free to use
+ // `unwrap()` here because we know v must not be empty.
+ let min_idx = min_index(v, is_less).unwrap();
+ v.swap(min_idx, k);
+ return;
+ }
+
+ let p = median_of_ninthers(v, is_less);
+
+ if p == k {
+ return;
+ } else if p > k {
+ v = &mut v[..p];
+ } else {
+ // Since `p < k < v.len()`, `p + 1` doesn't overflow and is
+ // a valid index into the slice.
+ v = &mut v[p + 1..];
+ k -= p + 1;
+ }
+ }
+}
+
+// Optimized for when `k` lies somewhere in the middle of the slice. Selects a pivot
+// as close as possible to the median of the slice. For more details on how the algorithm
+// operates, refer to the paper <https://drops.dagstuhl.de/opus/volltexte/2017/7612/pdf/LIPIcs-SEA-2017-24.pdf>.
+fn median_of_ninthers<T, F: FnMut(&T, &T) -> bool>(v: &mut [T], is_less: &mut F) -> usize {
+ // use `saturating_mul` so the multiplication doesn't overflow on 16-bit platforms.
+ let frac = if v.len() <= 1024 {
+ v.len() / 12
+ } else if v.len() <= 128_usize.saturating_mul(1024) {
+ v.len() / 64
+ } else {
+ v.len() / 1024
+ };
+
+ let pivot = frac / 2;
+ let lo = v.len() / 2 - pivot;
+ let hi = frac + lo;
+ let gap = (v.len() - 9 * frac) / 4;
+ let mut a = lo - 4 * frac - gap;
+ let mut b = hi + gap;
+ for i in lo..hi {
+ ninther(v, is_less, a, i - frac, b, a + 1, i, b + 1, a + 2, i + frac, b + 2);
+ a += 3;
+ b += 3;
+ }
+
+ median_of_medians(&mut v[lo..lo + frac], is_less, pivot);
+ partition(v, lo + pivot, is_less).0
+}
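A worked example of the `frac` bucketing above (illustrative only; `128 * 1024` stands in for the saturating multiply that guards 16-bit targets):

fn frac_for(len: usize) -> usize {
    if len <= 1024 {
        len / 12
    } else if len <= 128 * 1024 {
        len / 64
    } else {
        len / 1024
    }
}

fn main() {
    assert_eq!(frac_for(600), 50); // len <= 1024: len / 12
    assert_eq!(frac_for(10_000), 156); // len <= 128 * 1024: len / 64
    assert_eq!(frac_for(1 << 20), 1024); // larger: len / 1024
}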
+
+/// Moves around the 9 elements at the indices a..i, such that
+/// after the call `v[e]` holds Tukey's ninther of those elements:
+/// the median of the medians of (a, b, c), (d, e, f) and (g, h, i).
+fn ninther<T, F: FnMut(&T, &T) -> bool>(
+ v: &mut [T],
+ is_less: &mut F,
+ a: usize,
+ mut b: usize,
+ c: usize,
+ mut d: usize,
+ e: usize,
+ mut f: usize,
+ g: usize,
+ mut h: usize,
+ i: usize,
+) {
+ b = median_idx(v, is_less, a, b, c);
+ h = median_idx(v, is_less, g, h, i);
+ if is_less(&v[h], &v[b]) {
+ mem::swap(&mut b, &mut h);
+ }
+ if is_less(&v[f], &v[d]) {
+ mem::swap(&mut d, &mut f);
+ }
+ if is_less(&v[e], &v[d]) {
+ // do nothing
+ } else if is_less(&v[f], &v[e]) {
+ d = f;
+ } else {
+ if is_less(&v[e], &v[b]) {
+ v.swap(e, b);
+ } else if is_less(&v[h], &v[e]) {
+ v.swap(e, h);
+ }
+ return;
+ }
+ if is_less(&v[d], &v[b]) {
+ d = b;
+ } else if is_less(&v[h], &v[d]) {
+ d = h;
+ }
+
+ v.swap(d, e);
+}
+
+/// Returns the index of the median of the 3
+/// elements `v[a]`, `v[b]` and `v[c]`.
+fn median_idx<T, F: FnMut(&T, &T) -> bool>(
+ v: &[T],
+ is_less: &mut F,
+ mut a: usize,
+ b: usize,
+ mut c: usize,
+) -> usize {
+ if is_less(&v[c], &v[a]) {
+ mem::swap(&mut a, &mut c);
+ }
+ if is_less(&v[c], &v[b]) {
+ return c;
+ }
+ if is_less(&v[b], &v[a]) {
+ return a;
+ }
+ b
+}
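To close out the new module, a standalone illustration of the median-of-3 selection that `median_idx` performs, written for plain integers with `<` (an assumed example, not the std internals):

fn median3(a: i32, b: i32, c: i32) -> i32 {
    // Order the outer pair, then clamp `b` into that range: the result is the median.
    let (mut lo, mut hi) = (a, c);
    if hi < lo {
        core::mem::swap(&mut lo, &mut hi);
    }
    if hi < b {
        hi
    } else if b < lo {
        lo
    } else {
        b
    }
}

fn main() {
    assert_eq!(median3(3, 1, 2), 2);
    assert_eq!(median3(9, 9, 1), 9);
    assert_eq!(median3(5, 7, 6), 6);
}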
diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs
index 07fd96f92..db76d2625 100644
--- a/library/core/src/slice/sort.rs
+++ b/library/core/src/slice/sort.rs
@@ -145,7 +145,7 @@ where
/// Never inline this function to avoid code bloat. It still optimizes nicely and has practically no
/// performance impact; it can even improve performance in some cases.
#[inline(never)]
-fn insertion_sort_shift_left<T, F>(v: &mut [T], offset: usize, is_less: &mut F)
+pub(super) fn insertion_sort_shift_left<T, F>(v: &mut [T], offset: usize, is_less: &mut F)
where
F: FnMut(&T, &T) -> bool,
{
@@ -557,7 +557,7 @@ where
///
/// 1. Number of elements smaller than `v[pivot]`.
/// 2. True if `v` was already partitioned.
-fn partition<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool)
+pub(super) fn partition<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool)
where
F: FnMut(&T, &T) -> bool,
{
@@ -612,7 +612,7 @@ where
///
/// Returns the number of elements equal to the pivot. It is assumed that `v` does not contain
/// elements smaller than the pivot.
-fn partition_equal<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> usize
+pub(super) fn partition_equal<T, F>(v: &mut [T], pivot: usize, is_less: &mut F) -> usize
where
F: FnMut(&T, &T) -> bool,
{
@@ -670,7 +670,7 @@ where
/// Scatters some elements around in an attempt to break patterns that might cause imbalanced
/// partitions in quicksort.
#[cold]
-fn break_patterns<T>(v: &mut [T]) {
+pub(super) fn break_patterns<T>(v: &mut [T]) {
let len = v.len();
if len >= 8 {
let mut seed = len;
@@ -719,7 +719,7 @@ fn break_patterns<T>(v: &mut [T]) {
/// Chooses a pivot in `v` and returns the index and `true` if the slice is likely already sorted.
///
/// Elements in `v` might be reordered in the process.
-fn choose_pivot<T, F>(v: &mut [T], is_less: &mut F) -> (usize, bool)
+pub(super) fn choose_pivot<T, F>(v: &mut [T], is_less: &mut F) -> (usize, bool)
where
F: FnMut(&T, &T) -> bool,
{
@@ -897,138 +897,6 @@ where
recurse(v, &mut is_less, None, limit);
}
-fn partition_at_index_loop<'a, T, F>(
- mut v: &'a mut [T],
- mut index: usize,
- is_less: &mut F,
- mut pred: Option<&'a T>,
-) where
- F: FnMut(&T, &T) -> bool,
-{
- // Limit the amount of iterations and fall back to heapsort, similarly to `slice::sort_unstable`.
- // This lowers the worst case running time from O(n^2) to O(n log n).
- // FIXME: Investigate whether it would be better to use something like Median of Medians
- // or Fast Deterministic Selection to guarantee O(n) worst case.
- let mut limit = usize::BITS - v.len().leading_zeros();
-
- // True if the last partitioning was reasonably balanced.
- let mut was_balanced = true;
-
- loop {
- let len = v.len();
-
- // For slices of up to this length it's probably faster to simply sort them.
- const MAX_INSERTION: usize = 10;
- if len <= MAX_INSERTION {
- if len >= 2 {
- insertion_sort_shift_left(v, 1, is_less);
- }
- return;
- }
-
- if limit == 0 {
- heapsort(v, is_less);
- return;
- }
-
- // If the last partitioning was imbalanced, try breaking patterns in the slice by shuffling
- // some elements around. Hopefully we'll choose a better pivot this time.
- if !was_balanced {
- break_patterns(v);
- limit -= 1;
- }
-
- // Choose a pivot
- let (pivot, _) = choose_pivot(v, is_less);
-
- // If the chosen pivot is equal to the predecessor, then it's the smallest element in the
- // slice. Partition the slice into elements equal to and elements greater than the pivot.
- // This case is usually hit when the slice contains many duplicate elements.
- if let Some(p) = pred {
- if !is_less(p, &v[pivot]) {
- let mid = partition_equal(v, pivot, is_less);
-
- // If we've passed our index, then we're good.
- if mid > index {
- return;
- }
-
- // Otherwise, continue sorting elements greater than the pivot.
- v = &mut v[mid..];
- index = index - mid;
- pred = None;
- continue;
- }
- }
-
- let (mid, _) = partition(v, pivot, is_less);
- was_balanced = cmp::min(mid, len - mid) >= len / 8;
-
- // Split the slice into `left`, `pivot`, and `right`.
- let (left, right) = v.split_at_mut(mid);
- let (pivot, right) = right.split_at_mut(1);
- let pivot = &pivot[0];
-
- if mid < index {
- v = right;
- index = index - mid - 1;
- pred = Some(pivot);
- } else if mid > index {
- v = left;
- } else {
- // If mid == index, then we're done, since partition() guaranteed that all elements
- // after mid are greater than or equal to mid.
- return;
- }
- }
-}
-
-/// Reorder the slice such that the element at `index` is at its final sorted position.
-pub fn partition_at_index<T, F>(
- v: &mut [T],
- index: usize,
- mut is_less: F,
-) -> (&mut [T], &mut T, &mut [T])
-where
- F: FnMut(&T, &T) -> bool,
-{
- use cmp::Ordering::Greater;
- use cmp::Ordering::Less;
-
- if index >= v.len() {
- panic!("partition_at_index index {} greater than length of slice {}", index, v.len());
- }
-
- if T::IS_ZST {
- // Sorting has no meaningful behavior on zero-sized types. Do nothing.
- } else if index == v.len() - 1 {
- // Find max element and place it in the last position of the array. We're free to use
- // `unwrap()` here because we know v must not be empty.
- let (max_index, _) = v
- .iter()
- .enumerate()
- .max_by(|&(_, x), &(_, y)| if is_less(x, y) { Less } else { Greater })
- .unwrap();
- v.swap(max_index, index);
- } else if index == 0 {
- // Find min element and place it in the first position of the array. We're free to use
- // `unwrap()` here because we know v must not be empty.
- let (min_index, _) = v
- .iter()
- .enumerate()
- .min_by(|&(_, x), &(_, y)| if is_less(x, y) { Less } else { Greater })
- .unwrap();
- v.swap(min_index, index);
- } else {
- partition_at_index_loop(v, index, &mut is_less, None);
- }
-
- let (left, right) = v.split_at_mut(index);
- let (pivot, right) = right.split_at_mut(1);
- let pivot = &mut pivot[0];
- (left, pivot, right)
-}
-
/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
/// stores the result into `v[..]`.
///
@@ -1085,12 +953,12 @@ where
// SAFETY: left and right must be valid and part of v same for out.
unsafe {
- let to_copy = if is_less(&*right, &**left) {
- get_and_increment(&mut right)
- } else {
- get_and_increment(left)
- };
- ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
+ let is_l = is_less(&*right, &**left);
+ let to_copy = if is_l { right } else { *left };
+ ptr::copy_nonoverlapping(to_copy, *out, 1);
+ *out = out.add(1);
+ right = right.add(is_l as usize);
+ *left = left.add(!is_l as usize);
}
}
} else {
@@ -1113,32 +981,18 @@ where
// SAFETY: left and right must be valid and part of v same for out.
unsafe {
- let to_copy = if is_less(&*right.sub(1), &*left.sub(1)) {
- decrement_and_get(left)
- } else {
- decrement_and_get(right)
- };
- ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
+ let is_l = is_less(&*right.sub(1), &*left.sub(1));
+ *left = left.sub(is_l as usize);
+ *right = right.sub(!is_l as usize);
+ let to_copy = if is_l { *left } else { *right };
+ out = out.sub(1);
+ ptr::copy_nonoverlapping(to_copy, out, 1);
}
}
}
// Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
// it will now be copied into the hole in `v`.
- unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
- let old = *ptr;
-
- // SAFETY: ptr.add(1) must still be a valid pointer and part of `v`.
- *ptr = unsafe { ptr.add(1) };
- old
- }
-
- unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
- // SAFETY: ptr.sub(1) must still be a valid pointer and part of `v`.
- *ptr = unsafe { ptr.sub(1) };
- *ptr
- }
-
// When dropped, copies the range `start..end` into `dest..`.
struct MergeHole<T> {
start: *mut T,
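The merge rewrite above replaces the `get_and_increment`/`decrement_and_get` helpers with branchless cursor bumps driven by the comparison result. A safe, index-based sketch of the same idea in the forward direction (illustrative only; the real code advances raw pointers inside the `MergeHole` drop guard):

fn merge_sorted(a: &[i32], b: &[i32]) -> Vec<i32> {
    let (mut i, mut j) = (0, 0);
    let mut out = Vec::with_capacity(a.len() + b.len());
    while i < a.len() && j < b.len() {
        // Take from `b` only when strictly smaller, keeping the merge stable.
        let take_b = b[j] < a[i];
        out.push(if take_b { b[j] } else { a[i] });
        // Branchless-style bumps: exactly one of the two cursors advances.
        j += take_b as usize;
        i += !take_b as usize;
    }
    out.extend_from_slice(&a[i..]);
    out.extend_from_slice(&b[j..]);
    out
}

fn main() {
    assert_eq!(merge_sorted(&[1, 3, 5], &[2, 3, 4]), [1, 2, 3, 3, 4, 5]);
}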
@@ -1456,7 +1310,6 @@ pub struct TimSortRun {
/// Takes a range as denoted by start and end, that is already sorted and extends it to the right if
/// necessary with sorts optimized for smaller ranges such as insertion sort.
-#[cfg(not(no_global_oom_handling))]
fn provide_sorted_batch<T, F>(v: &mut [T], start: usize, mut end: usize, is_less: &mut F) -> usize
where
F: FnMut(&T, &T) -> bool,
diff --git a/library/core/src/str/mod.rs b/library/core/src/str/mod.rs
index 041694299..ef05b25fd 100644
--- a/library/core/src/str/mod.rs
+++ b/library/core/src/str/mod.rs
@@ -16,6 +16,7 @@ mod validations;
use self::pattern::Pattern;
use self::pattern::{DoubleEndedSearcher, ReverseSearcher, Searcher};
+use crate::ascii;
use crate::char::{self, EscapeDebugExtArgs};
use crate::mem;
use crate::slice::{self, SliceIndex};
@@ -206,9 +207,8 @@ impl str {
/// ```
#[must_use]
#[stable(feature = "is_char_boundary", since = "1.9.0")]
- #[rustc_const_unstable(feature = "const_is_char_boundary", issue = "none")]
#[inline]
- pub const fn is_char_boundary(&self, index: usize) -> bool {
+ pub fn is_char_boundary(&self, index: usize) -> bool {
// 0 is always ok.
// Test for 0 explicitly so that it can optimize out the check
// easily and skip reading string data for that case.
@@ -436,9 +436,8 @@ impl str {
/// assert!(v.get(..42).is_none());
/// ```
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
- pub const fn get<I: ~const SliceIndex<str>>(&self, i: I) -> Option<&I::Output> {
+ pub fn get<I: SliceIndex<str>>(&self, i: I) -> Option<&I::Output> {
i.get(self)
}
@@ -469,9 +468,8 @@ impl str {
/// assert_eq!("HEllo", v);
/// ```
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
- pub const fn get_mut<I: ~const SliceIndex<str>>(&mut self, i: I) -> Option<&mut I::Output> {
+ pub fn get_mut<I: SliceIndex<str>>(&mut self, i: I) -> Option<&mut I::Output> {
i.get_mut(self)
}
@@ -502,9 +500,8 @@ impl str {
/// }
/// ```
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
- pub const unsafe fn get_unchecked<I: ~const SliceIndex<str>>(&self, i: I) -> &I::Output {
+ pub unsafe fn get_unchecked<I: SliceIndex<str>>(&self, i: I) -> &I::Output {
// SAFETY: the caller must uphold the safety contract for `get_unchecked`;
// the slice is dereferenceable because `self` is a safe reference.
// The returned pointer is safe because impls of `SliceIndex` have to guarantee that it is.
@@ -538,12 +535,8 @@ impl str {
/// }
/// ```
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
- #[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
#[inline]
- pub const unsafe fn get_unchecked_mut<I: ~const SliceIndex<str>>(
- &mut self,
- i: I,
- ) -> &mut I::Output {
+ pub unsafe fn get_unchecked_mut<I: SliceIndex<str>>(&mut self, i: I) -> &mut I::Output {
// SAFETY: the caller must uphold the safety contract for `get_unchecked_mut`;
// the slice is dereferenceable because `self` is a safe reference.
// The returned pointer is safe because impls of `SliceIndex` have to guarantee that it is.
@@ -2365,15 +2358,26 @@ impl str {
/// assert!(!non_ascii.is_ascii());
/// ```
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+ #[rustc_const_unstable(feature = "const_slice_is_ascii", issue = "111090")]
#[must_use]
#[inline]
- pub fn is_ascii(&self) -> bool {
+ pub const fn is_ascii(&self) -> bool {
// We can treat each byte as character here: all multibyte characters
// start with a byte that is not in the ASCII range, so we will stop
// there already.
self.as_bytes().is_ascii()
}
+ /// If this string slice [`is_ascii`](Self::is_ascii), returns it as a slice
+ /// of [ASCII characters](`ascii::Char`), otherwise returns `None`.
+ #[unstable(feature = "ascii_char", issue = "110998")]
+ #[must_use]
+ #[inline]
+ pub const fn as_ascii(&self) -> Option<&[ascii::Char]> {
+ // Like in `is_ascii`, we can work on the bytes directly.
+ self.as_bytes().as_ascii()
+ }
+
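`str::is_ascii` itself has been stable since 1.23; only its const-ness is gated here, and the new `as_ascii` is nightly-only under `ascii_char`. A small stable sketch of the existing behaviour:

fn main() {
    assert!("queue".is_ascii());
    assert!(!"Grüße".is_ascii()); // contains non-ASCII characters
}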
/// Checks that two strings are an ASCII case-insensitive match.
///
/// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`,
@@ -2582,8 +2586,7 @@ impl AsRef<[u8]> for str {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl const Default for &str {
+impl Default for &str {
/// Creates an empty str
#[inline]
fn default() -> Self {
diff --git a/library/core/src/str/pattern.rs b/library/core/src/str/pattern.rs
index e3a464a1c..91ee2903a 100644
--- a/library/core/src/str/pattern.rs
+++ b/library/core/src/str/pattern.rs
@@ -791,8 +791,8 @@ pub struct CharArrayRefSearcher<'a, 'b, const N: usize>(
/// # Examples
///
/// ```
-/// assert_eq!("Hello world".find(['l', 'l']), Some(2));
-/// assert_eq!("Hello world".find(['l', 'l']), Some(2));
+/// assert_eq!("Hello world".find(['o', 'l']), Some(2));
+/// assert_eq!("Hello world".find(['h', 'w']), Some(6));
/// ```
impl<'a, const N: usize> Pattern<'a> for [char; N] {
pattern_methods!(CharArraySearcher<'a, N>, MultiCharEqPattern, CharArraySearcher);
@@ -811,8 +811,8 @@ unsafe impl<'a, const N: usize> ReverseSearcher<'a> for CharArraySearcher<'a, N>
/// # Examples
///
/// ```
-/// assert_eq!("Hello world".find(&['l', 'l']), Some(2));
-/// assert_eq!("Hello world".find(&['l', 'l']), Some(2));
+/// assert_eq!("Hello world".find(&['o', 'l']), Some(2));
+/// assert_eq!("Hello world".find(&['h', 'w']), Some(6));
/// ```
impl<'a, 'b, const N: usize> Pattern<'a> for &'b [char; N] {
pattern_methods!(CharArrayRefSearcher<'a, 'b, N>, MultiCharEqPattern, CharArrayRefSearcher);
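The corrected doctests above search for the first match of any `char` in the array; one more illustrative pair on the same haystack:

fn main() {
    assert_eq!("Hello world".find(['w', 'o']), Some(4)); // the first 'o'
    assert_eq!("Hello world".rfind(['H', 'd']), Some(10)); // the final 'd'
}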
diff --git a/library/core/src/str/traits.rs b/library/core/src/str/traits.rs
index 41c097b55..1d52335f2 100644
--- a/library/core/src/str/traits.rs
+++ b/library/core/src/str/traits.rs
@@ -50,10 +50,9 @@ impl PartialOrd for str {
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-impl<I> const ops::Index<I> for str
+impl<I> ops::Index<I> for str
where
- I: ~const SliceIndex<str>,
+ I: SliceIndex<str>,
{
type Output = I::Output;
@@ -64,10 +63,9 @@ where
}
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-impl<I> const ops::IndexMut<I> for str
+impl<I> ops::IndexMut<I> for str
where
- I: ~const SliceIndex<str>,
+ I: SliceIndex<str>,
{
#[inline]
fn index_mut(&mut self, index: I) -> &mut I::Output {
@@ -96,7 +94,7 @@ const fn str_index_overflow_fail() -> ! {
/// Equivalent to `&self[0 .. len]` or `&mut self[0 .. len]`.
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl const SliceIndex<str> for ops::RangeFull {
+unsafe impl SliceIndex<str> for ops::RangeFull {
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
@@ -161,7 +159,7 @@ unsafe impl const SliceIndex<str> for ops::RangeFull {
/// ```
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl const SliceIndex<str> for ops::Range<usize> {
+unsafe impl SliceIndex<str> for ops::Range<usize> {
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
@@ -271,7 +269,7 @@ unsafe impl const SliceIndex<str> for ops::Range<usize> {
/// character (as defined by `is_char_boundary`), or if `end > len`.
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl const SliceIndex<str> for ops::RangeTo<usize> {
+unsafe impl SliceIndex<str> for ops::RangeTo<usize> {
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
@@ -340,7 +338,7 @@ unsafe impl const SliceIndex<str> for ops::RangeTo<usize> {
/// a character (as defined by `is_char_boundary`), or if `begin > len`.
#[stable(feature = "str_checked_slicing", since = "1.20.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl const SliceIndex<str> for ops::RangeFrom<usize> {
+unsafe impl SliceIndex<str> for ops::RangeFrom<usize> {
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
@@ -412,7 +410,7 @@ unsafe impl const SliceIndex<str> for ops::RangeFrom<usize> {
/// byte offset or equal to `len`), if `begin > end`, or if `end >= len`.
#[stable(feature = "inclusive_range", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl const SliceIndex<str> for ops::RangeInclusive<usize> {
+unsafe impl SliceIndex<str> for ops::RangeInclusive<usize> {
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
@@ -464,7 +462,7 @@ unsafe impl const SliceIndex<str> for ops::RangeInclusive<usize> {
/// `is_char_boundary`, or equal to `len`), or if `end >= len`.
#[stable(feature = "inclusive_range", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
-unsafe impl const SliceIndex<str> for ops::RangeToInclusive<usize> {
+unsafe impl SliceIndex<str> for ops::RangeToInclusive<usize> {
type Output = str;
#[inline]
fn get(self, slice: &str) -> Option<&Self::Output> {
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index f1ed68d72..236b7f423 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -147,8 +147,7 @@ pub struct AtomicBool {
#[cfg(target_has_atomic_load_store = "8")]
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl const Default for AtomicBool {
+impl Default for AtomicBool {
/// Creates an `AtomicBool` initialized to `false`.
#[inline]
fn default() -> Self {
@@ -179,8 +178,7 @@ pub struct AtomicPtr<T> {
#[cfg(target_has_atomic_load_store = "ptr")]
#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
-impl<T> const Default for AtomicPtr<T> {
+impl<T> Default for AtomicPtr<T> {
/// Creates a null `AtomicPtr<T>`.
fn default() -> AtomicPtr<T> {
AtomicPtr::new(crate::ptr::null_mut())
@@ -1916,8 +1914,7 @@ impl<T> AtomicPtr<T> {
#[cfg(target_has_atomic_load_store = "8")]
#[stable(feature = "atomic_bool_from", since = "1.24.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl const From<bool> for AtomicBool {
+impl From<bool> for AtomicBool {
/// Converts a `bool` into an `AtomicBool`.
///
/// # Examples
@@ -1935,8 +1932,7 @@ impl const From<bool> for AtomicBool {
#[cfg(target_has_atomic_load_store = "ptr")]
#[stable(feature = "atomic_from", since = "1.23.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<*mut T> for AtomicPtr<T> {
+impl<T> From<*mut T> for AtomicPtr<T> {
/// Converts a `*mut T` into an `AtomicPtr<T>`.
#[inline]
fn from(p: *mut T) -> Self {
@@ -2002,8 +1998,7 @@ macro_rules! atomic_int {
pub const $atomic_init: $atomic_type = $atomic_type::new(0);
#[$stable]
- #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
- impl const Default for $atomic_type {
+ impl Default for $atomic_type {
#[inline]
fn default() -> Self {
Self::new(Default::default())
@@ -2011,8 +2006,7 @@ macro_rules! atomic_int {
}
#[$stable_from]
- #[rustc_const_unstable(feature = "const_num_from_num", issue = "87852")]
- impl const From<$int_type> for $atomic_type {
+ impl From<$int_type> for $atomic_type {
#[doc = concat!("Converts an `", stringify!($int_type), "` into an `", stringify!($atomic_type), "`.")]
#[inline]
fn from(v: $int_type) -> Self { Self::new(v) }
diff --git a/library/core/src/task/mod.rs b/library/core/src/task/mod.rs
index c5f89b9a2..3f0080e38 100644
--- a/library/core/src/task/mod.rs
+++ b/library/core/src/task/mod.rs
@@ -13,5 +13,3 @@ pub use self::wake::{Context, RawWaker, RawWakerVTable, Waker};
mod ready;
#[stable(feature = "ready_macro", since = "1.64.0")]
pub use ready::ready;
-#[unstable(feature = "poll_ready", issue = "89780")]
-pub use ready::Ready;
diff --git a/library/core/src/task/poll.rs b/library/core/src/task/poll.rs
index af5bf441b..0a0f702f6 100644
--- a/library/core/src/task/poll.rs
+++ b/library/core/src/task/poll.rs
@@ -3,7 +3,6 @@
use crate::convert;
use crate::ops::{self, ControlFlow};
use crate::result::Result;
-use crate::task::Ready;
/// Indicates whether a value is available or if the current task has been
/// scheduled to receive a wakeup instead.
@@ -95,38 +94,6 @@ impl<T> Poll<T> {
pub const fn is_pending(&self) -> bool {
!self.is_ready()
}
-
- /// Extracts the successful type of a [`Poll<T>`].
- ///
- /// When combined with the `?` operator, this function will
- /// propagate any [`Poll::Pending`] values to the caller, and
- /// extract the `T` from [`Poll::Ready`].
- ///
- /// # Examples
- ///
- /// ```rust
- /// #![feature(poll_ready)]
- ///
- /// use std::task::{Context, Poll};
- /// use std::future::{self, Future};
- /// use std::pin::Pin;
- ///
- /// pub fn do_poll(cx: &mut Context<'_>) -> Poll<()> {
- /// let mut fut = future::ready(42);
- /// let fut = Pin::new(&mut fut);
- ///
- /// let num = fut.poll(cx).ready()?;
- /// # drop(num);
- /// // ... use num
- ///
- /// Poll::Ready(())
- /// }
- /// ```
- #[inline]
- #[unstable(feature = "poll_ready", issue = "89780")]
- pub fn ready(self) -> Ready<T> {
- Ready(self)
- }
}
impl<T, E> Poll<Result<T, E>> {
@@ -247,8 +214,7 @@ impl<T, E> Poll<Option<Result<T, E>>> {
}
#[stable(feature = "futures_api", since = "1.36.0")]
-#[rustc_const_unstable(feature = "const_convert", issue = "88674")]
-impl<T> const From<T> for Poll<T> {
+impl<T> From<T> for Poll<T> {
/// Moves the value into a [`Poll::Ready`] to make a `Poll<T>`.
///
/// # Example
diff --git a/library/core/src/task/ready.rs b/library/core/src/task/ready.rs
index b1daf545f..495d72fd1 100644
--- a/library/core/src/task/ready.rs
+++ b/library/core/src/task/ready.rs
@@ -1,8 +1,3 @@
-use core::convert;
-use core::fmt;
-use core::ops::{ControlFlow, FromResidual, Try};
-use core::task::Poll;
-
/// Extracts the successful type of a [`Poll<T>`].
///
/// This macro bakes in propagation of [`Pending`] signals by returning early.
@@ -22,7 +17,7 @@ use core::task::Poll;
/// let fut = Pin::new(&mut fut);
///
/// let num = ready!(fut.poll(cx));
-/// # drop(num);
+/// # let _ = num;
/// // ... use num
///
/// Poll::Ready(())
@@ -44,7 +39,7 @@ use core::task::Poll;
/// Poll::Ready(t) => t,
/// Poll::Pending => return Poll::Pending,
/// };
-/// # drop(num);
+/// # let _ = num; // to silence unused warning
/// # // ... use num
/// #
/// # Poll::Ready(())
@@ -60,55 +55,3 @@ pub macro ready($e:expr) {
}
}
}
-
-/// Extracts the successful type of a [`Poll<T>`].
-///
-/// See [`Poll::ready`] for details.
-#[unstable(feature = "poll_ready", issue = "89780")]
-pub struct Ready<T>(pub(crate) Poll<T>);
-
-#[unstable(feature = "poll_ready", issue = "89780")]
-impl<T> Try for Ready<T> {
- type Output = T;
- type Residual = Ready<convert::Infallible>;
-
- #[inline]
- fn from_output(output: Self::Output) -> Self {
- Ready(Poll::Ready(output))
- }
-
- #[inline]
- fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
- match self.0 {
- Poll::Ready(v) => ControlFlow::Continue(v),
- Poll::Pending => ControlFlow::Break(Ready(Poll::Pending)),
- }
- }
-}
-
-#[unstable(feature = "poll_ready", issue = "89780")]
-impl<T> FromResidual for Ready<T> {
- #[inline]
- fn from_residual(residual: Ready<convert::Infallible>) -> Self {
- match residual.0 {
- Poll::Pending => Ready(Poll::Pending),
- }
- }
-}
-
-#[unstable(feature = "poll_ready", issue = "89780")]
-impl<T> FromResidual<Ready<convert::Infallible>> for Poll<T> {
- #[inline]
- fn from_residual(residual: Ready<convert::Infallible>) -> Self {
- match residual.0 {
- Poll::Pending => Poll::Pending,
- }
- }
-}
-
-#[unstable(feature = "poll_ready", issue = "89780")]
-impl<T> fmt::Debug for Ready<T> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("Ready").finish()
- }
-}
diff --git a/library/core/src/task/wake.rs b/library/core/src/task/wake.rs
index 808825326..7043ab5ff 100644
--- a/library/core/src/task/wake.rs
+++ b/library/core/src/task/wake.rs
@@ -232,7 +232,7 @@ impl fmt::Debug for Context<'_> {
///
/// [`Future::poll()`]: core::future::Future::poll
/// [`Poll::Pending`]: core::task::Poll::Pending
-#[repr(transparent)]
+#[cfg_attr(not(doc), repr(transparent))] // work around https://github.com/rust-lang/rust/issues/66401
#[stable(feature = "futures_api", since = "1.36.0")]
pub struct Waker {
waker: RawWaker,
diff --git a/library/core/src/time.rs b/library/core/src/time.rs
index ba1cb6efa..b08d5782a 100644
--- a/library/core/src/time.rs
+++ b/library/core/src/time.rs
@@ -735,8 +735,7 @@ impl Duration {
#[stable(feature = "duration_float", since = "1.38.0")]
#[must_use]
#[inline]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
- pub const fn from_secs_f64(secs: f64) -> Duration {
+ pub fn from_secs_f64(secs: f64) -> Duration {
match Duration::try_from_secs_f64(secs) {
Ok(v) => v,
Err(e) => panic!("{}", e.description()),
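Dropping the const gate leaves the runtime behaviour of the float constructors unchanged; a small stable sketch for context:

use std::time::Duration;

fn main() {
    let d = Duration::from_secs_f64(2.5);
    assert_eq!(d, Duration::new(2, 500_000_000));
    // `mul_f64`/`div_f64` scale through the same float conversion.
    assert_eq!(d.mul_f64(2.0), Duration::new(5, 0));
}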
@@ -773,8 +772,7 @@ impl Duration {
#[stable(feature = "duration_float", since = "1.38.0")]
#[must_use]
#[inline]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
- pub const fn from_secs_f32(secs: f32) -> Duration {
+ pub fn from_secs_f32(secs: f32) -> Duration {
match Duration::try_from_secs_f32(secs) {
Ok(v) => v,
Err(e) => panic!("{}", e.description()),
@@ -798,8 +796,7 @@ impl Duration {
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
- pub const fn mul_f64(self, rhs: f64) -> Duration {
+ pub fn mul_f64(self, rhs: f64) -> Duration {
Duration::from_secs_f64(rhs * self.as_secs_f64())
}
@@ -820,8 +817,7 @@ impl Duration {
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
- pub const fn mul_f32(self, rhs: f32) -> Duration {
+ pub fn mul_f32(self, rhs: f32) -> Duration {
Duration::from_secs_f32(rhs * self.as_secs_f32())
}
@@ -842,8 +838,7 @@ impl Duration {
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
- pub const fn div_f64(self, rhs: f64) -> Duration {
+ pub fn div_f64(self, rhs: f64) -> Duration {
Duration::from_secs_f64(self.as_secs_f64() / rhs)
}
@@ -866,8 +861,7 @@ impl Duration {
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
- pub const fn div_f32(self, rhs: f32) -> Duration {
+ pub fn div_f32(self, rhs: f32) -> Duration {
Duration::from_secs_f32(self.as_secs_f32() / rhs)
}
@@ -1178,7 +1172,7 @@ impl fmt::Debug for Duration {
emit_without_padding(f)
} else {
// We need to add padding. Use the `Formatter::padding` helper function.
- let default_align = crate::fmt::rt::v1::Alignment::Left;
+ let default_align = fmt::Alignment::Left;
let post_padding = f.padding(requested_w - actual_w, default_align)?;
emit_without_padding(f)?;
post_padding.write(f)
@@ -1402,9 +1396,8 @@ impl Duration {
/// assert_eq!(res, Ok(Duration::new(1, 2_929_688)));
/// ```
#[stable(feature = "duration_checked_float", since = "1.66.0")]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
#[inline]
- pub const fn try_from_secs_f32(secs: f32) -> Result<Duration, TryFromFloatSecsError> {
+ pub fn try_from_secs_f32(secs: f32) -> Result<Duration, TryFromFloatSecsError> {
try_from_secs!(
secs = secs,
mantissa_bits = 23,
@@ -1479,9 +1472,8 @@ impl Duration {
/// assert_eq!(res, Ok(Duration::new(1, 2_929_688)));
/// ```
#[stable(feature = "duration_checked_float", since = "1.66.0")]
- #[rustc_const_unstable(feature = "duration_consts_float", issue = "72440")]
#[inline]
- pub const fn try_from_secs_f64(secs: f64) -> Result<Duration, TryFromFloatSecsError> {
+ pub fn try_from_secs_f64(secs: f64) -> Result<Duration, TryFromFloatSecsError> {
try_from_secs!(
secs = secs,
mantissa_bits = 52,
diff --git a/library/core/src/tuple.rs b/library/core/src/tuple.rs
index 0620e7173..a1388dfee 100644
--- a/library/core/src/tuple.rs
+++ b/library/core/src/tuple.rs
@@ -1,7 +1,6 @@
// See src/libstd/primitive_docs.rs for documentation.
use crate::cmp::Ordering::{self, *};
-use crate::mem::transmute;
// Recursive macro for implementing n-ary tuple functions and operations
//
@@ -22,8 +21,7 @@ macro_rules! tuple_impls {
maybe_tuple_doc! {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl<$($T: ~const PartialEq),+> const PartialEq for ($($T,)+)
+ impl<$($T: PartialEq),+> PartialEq for ($($T,)+)
where
last_type!($($T,)+): ?Sized
{
@@ -50,8 +48,7 @@ macro_rules! tuple_impls {
maybe_tuple_doc! {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl<$($T: ~const PartialOrd + ~const PartialEq),+> const PartialOrd for ($($T,)+)
+ impl<$($T: PartialOrd),+> PartialOrd for ($($T,)+)
where
last_type!($($T,)+): ?Sized
{
@@ -81,8 +78,7 @@ macro_rules! tuple_impls {
maybe_tuple_doc! {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
- impl<$($T: ~const Ord),+> const Ord for ($($T,)+)
+ impl<$($T: Ord),+> Ord for ($($T,)+)
where
last_type!($($T,)+): ?Sized
{
@@ -96,14 +92,33 @@ macro_rules! tuple_impls {
maybe_tuple_doc! {
$($T)+ @
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
- impl<$($T: ~const Default),+> const Default for ($($T,)+) {
+ impl<$($T: Default),+> Default for ($($T,)+) {
#[inline]
fn default() -> ($($T,)+) {
($({ let x: $T = Default::default(); x},)+)
}
}
}
+
+ #[stable(feature = "array_tuple_conv", since = "1.71.0")]
+ impl<T> From<[T; ${count(T)}]> for ($(${ignore(T)} T,)+) {
+ #[inline]
+ #[allow(non_snake_case)]
+ fn from(array: [T; ${count(T)}]) -> Self {
+ let [$($T,)+] = array;
+ ($($T,)+)
+ }
+ }
+
+ #[stable(feature = "array_tuple_conv", since = "1.71.0")]
+ impl<T> From<($(${ignore(T)} T,)+)> for [T; ${count(T)}] {
+ #[inline]
+ #[allow(non_snake_case)]
+ fn from(tuple: ($(${ignore(T)} T,)+)) -> Self {
+ let ($($T,)+) = tuple;
+ [$($T,)+]
+ }
+ }
}
}
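The new impls (stabilized as `array_tuple_conv`) let homogeneous tuples and arrays of the same arity convert both ways; a minimal sketch:

fn main() {
    let t: (u8, u8, u8) = [1, 2, 3].into();
    assert_eq!(t, (1, 2, 3));
    let a: [u8; 3] = (4, 5, 6).into();
    assert_eq!(a, [4, 5, 6]);
}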
@@ -126,16 +141,13 @@ macro_rules! maybe_tuple_doc {
#[inline]
const fn ordering_is_some(c: Option<Ordering>, x: Ordering) -> bool {
// FIXME: Just use `==` once that's const-stable on `Option`s.
- // This isn't using `match` because that optimizes worse due to
- // making a two-step check (`Some` *then* the inner value).
-
- // SAFETY: There's no public guarantee for `Option<Ordering>`,
- // but we're core so we know that it's definitely a byte.
- unsafe {
- let c: i8 = transmute(c);
- let x: i8 = transmute(Some(x));
- c == x
- }
+ // This is mapping `None` to 2 and then doing the comparison afterwards
+ // because it optimizes better (`None::<Ordering>` is represented as 2).
+ x as i8
+ == match c {
+ Some(c) => c as i8,
+ None => 2,
+ }
}
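The rewritten comparison relies on `Ordering`'s `#[repr(i8)]` discriminants plus the niche detail, noted in the comment above, that `None::<Ordering>` is represented as 2. A small sketch of the discriminant values themselves:

use std::cmp::Ordering;

fn main() {
    assert_eq!(Ordering::Less as i8, -1);
    assert_eq!(Ordering::Equal as i8, 0);
    assert_eq!(Ordering::Greater as i8, 1);
    // There is no safe cast for `None::<Ordering>`; the value 2 comes from the
    // niche layout that `core` itself is allowed to rely on here.
}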
// Constructs an expression that performs a lexical ordering using method `$rel`.