Diffstat (library/core/src/slice):
-rw-r--r--  library/core/src/slice/ascii.rs        |   2
-rw-r--r--  library/core/src/slice/index.rs        | 112
-rw-r--r--  library/core/src/slice/iter.rs         |  25
-rw-r--r--  library/core/src/slice/iter/macros.rs  |  26
-rw-r--r--  library/core/src/slice/memchr.rs       |  41
-rw-r--r--  library/core/src/slice/mod.rs          | 177
-rw-r--r--  library/core/src/slice/raw.rs          |  38
-rw-r--r--  library/core/src/slice/rotate.rs       |   4
-rw-r--r--  library/core/src/slice/sort.rs         |  42
9 files changed, 336 insertions, 131 deletions
diff --git a/library/core/src/slice/ascii.rs b/library/core/src/slice/ascii.rs
index 63715a6b8..5e5399acc 100644
--- a/library/core/src/slice/ascii.rs
+++ b/library/core/src/slice/ascii.rs
@@ -215,8 +215,6 @@ impl<'a> iter::DoubleEndedIterator for EscapeAscii<'a> {
}
}
#[stable(feature = "inherent_ascii_escape", since = "1.60.0")]
-impl<'a> iter::ExactSizeIterator for EscapeAscii<'a> {}
-#[stable(feature = "inherent_ascii_escape", since = "1.60.0")]
impl<'a> iter::FusedIterator for EscapeAscii<'a> {}
#[stable(feature = "inherent_ascii_escape", since = "1.60.0")]
impl<'a> fmt::Display for EscapeAscii<'a> {
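The removal above is a correctness fix: `escape_ascii` expands each input byte to between one and four output bytes, so the iterator cannot cheaply know its exact remaining length, which is precisely what `ExactSizeIterator` promises. A standalone sketch of why, using only stable APIs:

```
fn main() {
    // One input byte escapes to two output bytes, so an `ExactSizeIterator`
    // impl reporting the remaining *input* byte count would simply be wrong.
    let escaped: Vec<u8> = b"\n".escape_ascii().collect();
    assert_eq!(escaped, b"\\n");
    assert_eq!(b"\n".escape_ascii().count(), 2);
}
```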
diff --git a/library/core/src/slice/index.rs b/library/core/src/slice/index.rs
index fd7ecf3da..6d2f7330d 100644
--- a/library/core/src/slice/index.rs
+++ b/library/core/src/slice/index.rs
@@ -48,10 +48,12 @@ const fn slice_start_index_len_fail(index: usize, len: usize) -> ! {
}
// FIXME const-hack
+#[track_caller]
fn slice_start_index_len_fail_rt(index: usize, len: usize) -> ! {
panic!("range start index {index} out of range for slice of length {len}");
}
+#[track_caller]
const fn slice_start_index_len_fail_ct(_: usize, _: usize) -> ! {
panic!("slice start index is out of range for slice");
}
@@ -69,10 +71,12 @@ const fn slice_end_index_len_fail(index: usize, len: usize) -> ! {
}
// FIXME const-hack
+#[track_caller]
fn slice_end_index_len_fail_rt(index: usize, len: usize) -> ! {
panic!("range end index {index} out of range for slice of length {len}");
}
+#[track_caller]
const fn slice_end_index_len_fail_ct(_: usize, _: usize) -> ! {
panic!("slice end index is out of range for slice");
}
@@ -88,10 +92,12 @@ const fn slice_index_order_fail(index: usize, end: usize) -> ! {
}
// FIXME const-hack
+#[track_caller]
fn slice_index_order_fail_rt(index: usize, end: usize) -> ! {
panic!("slice index starts at {index} but ends at {end}");
}
+#[track_caller]
const fn slice_index_order_fail_ct(_: usize, _: usize) -> ! {
panic!("slice index start is larger than end");
}
@@ -133,6 +139,8 @@ mod private_slice_index {
impl Sealed for ops::RangeToInclusive<usize> {}
#[stable(feature = "slice_index_with_ops_bound_pair", since = "1.53.0")]
impl Sealed for (ops::Bound<usize>, ops::Bound<usize>) {}
+
+ impl Sealed for ops::IndexRange {}
}
/// A helper trait used for indexing operations.
@@ -152,6 +160,7 @@ mod private_slice_index {
message = "the type `{T}` cannot be indexed by `{Self}`",
label = "slice indices are of type `usize` or ranges of `usize`"
)]
+#[const_trait]
pub unsafe trait SliceIndex<T: ?Sized>: private_slice_index::Sealed {
/// The output type returned by methods.
#[stable(feature = "slice_get_slice", since = "1.28.0")]
@@ -217,21 +226,29 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
#[inline]
unsafe fn get_unchecked(self, slice: *const [T]) -> *const T {
+ let this = self;
// SAFETY: the caller guarantees that `slice` is not dangling, so it
// cannot be longer than `isize::MAX`. They also guarantee that
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
// so the call to `add` is safe.
unsafe {
- assert_unsafe_precondition!(self < slice.len());
+ assert_unsafe_precondition!(
+ "slice::get_unchecked requires that the index is within the slice",
+ [T](this: usize, slice: *const [T]) => this < slice.len()
+ );
slice.as_ptr().add(self)
}
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
+ let this = self;
// SAFETY: see comments for `get_unchecked` above.
unsafe {
- assert_unsafe_precondition!(self < slice.len());
+ assert_unsafe_precondition!(
+ "slice::get_unchecked_mut requires that the index is within the slice",
+ [T](this: usize, slice: *mut [T]) => this < slice.len()
+ );
slice.as_mut_ptr().add(self)
}
}
@@ -249,6 +266,83 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
}
}
+/// Because `IndexRange` guarantees `start <= end`, fewer checks are needed here
+/// than there are for a general `Range<usize>` (which might be `100..3`).
+#[rustc_const_unstable(feature = "const_index_range_slice_index", issue = "none")]
+unsafe impl<T> const SliceIndex<[T]> for ops::IndexRange {
+ type Output = [T];
+
+ #[inline]
+ fn get(self, slice: &[T]) -> Option<&[T]> {
+ if self.end() <= slice.len() {
+ // SAFETY: `self` is checked to be valid and in bounds above.
+ unsafe { Some(&*self.get_unchecked(slice)) }
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
+ if self.end() <= slice.len() {
+ // SAFETY: `self` is checked to be valid and in bounds above.
+ unsafe { Some(&mut *self.get_unchecked_mut(slice)) }
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
+ let end = self.end();
+ // SAFETY: the caller guarantees that `slice` is not dangling, so it
+ // cannot be longer than `isize::MAX`. They also guarantee that
+ // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
+ // so the call to `add` is safe.
+
+ unsafe {
+ assert_unsafe_precondition!(
+ "slice::get_unchecked requires that the index is within the slice",
+ [T](end: usize, slice: *const [T]) => end <= slice.len()
+ );
+ ptr::slice_from_raw_parts(slice.as_ptr().add(self.start()), self.len())
+ }
+ }
+
+ #[inline]
+ unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
+ let end = self.end();
+ // SAFETY: see comments for `get_unchecked` above.
+ unsafe {
+ assert_unsafe_precondition!(
+ "slice::get_unchecked_mut requires that the index is within the slice",
+ [T](end: usize, slice: *mut [T]) => end <= slice.len()
+ );
+ ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start()), self.len())
+ }
+ }
+
+ #[inline]
+ fn index(self, slice: &[T]) -> &[T] {
+ if self.end() <= slice.len() {
+ // SAFETY: `self` is checked to be valid and in bounds above.
+ unsafe { &*self.get_unchecked(slice) }
+ } else {
+ slice_end_index_len_fail(self.end(), slice.len())
+ }
+ }
+
+ #[inline]
+ fn index_mut(self, slice: &mut [T]) -> &mut [T] {
+ if self.end() <= slice.len() {
+ // SAFETY: `self` is checked to be valid and in bounds above.
+ unsafe { &mut *self.get_unchecked_mut(slice) }
+ } else {
+ slice_end_index_len_fail(self.end(), slice.len())
+ }
+ }
+}
+
#[stable(feature = "slice_get_slice_impls", since = "1.15.0")]
#[rustc_const_unstable(feature = "const_slice_index", issue = "none")]
unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
@@ -276,22 +370,32 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
#[inline]
unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] {
+ let this = ops::Range { start: self.start, end: self.end };
// SAFETY: the caller guarantees that `slice` is not dangling, so it
// cannot be longer than `isize::MAX`. They also guarantee that
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
// so the call to `add` is safe.
unsafe {
- assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
+ assert_unsafe_precondition!(
+ "slice::get_unchecked requires that the range is within the slice",
+ [T](this: ops::Range<usize>, slice: *const [T]) =>
+ this.end >= this.start && this.end <= slice.len()
+ );
ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
}
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
+ let this = ops::Range { start: self.start, end: self.end };
// SAFETY: see comments for `get_unchecked` above.
unsafe {
- assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
+ assert_unsafe_precondition!(
+ "slice::get_unchecked_mut requires that the range is within the slice",
+ [T](this: ops::Range<usize>, slice: *mut [T]) =>
+ this.end >= this.start && this.end <= slice.len()
+ );
ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
}
}
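Two things happen in this file: every `assert_unsafe_precondition!` gains a human-readable message naming the violated contract, and `ops::IndexRange` (an internal, unstable range type whose constructor enforces `start <= end`) gets its own `SliceIndex` impl. Because that invariant is carried by the type, indexing only needs to compare `end` against the slice length. A standalone analogue of the design (the type and methods below are illustrative, not the libcore API):

```
struct IndexRange { start: usize, end: usize }

impl IndexRange {
    // The constructor is the only place the ordering check happens.
    fn new(start: usize, end: usize) -> Option<Self> {
        (start <= end).then_some(IndexRange { start, end })
    }

    fn get<'a, T>(&self, slice: &'a [T]) -> Option<&'a [T]> {
        // `start <= end` holds by construction, so unlike `Range<usize>`
        // (which might be `100..3`), only the upper bound is checked.
        if self.end <= slice.len() { Some(&slice[self.start..self.end]) } else { None }
    }
}

fn main() {
    let v = [1, 2, 3, 4];
    let r = IndexRange::new(1, 3).unwrap();
    assert_eq!(r.get(&v), Some(&v[1..3]));
    assert!(IndexRange::new(3, 1).is_none()); // `100..3`-style ranges can't exist
}
```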
diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index f1e659309..8a8962828 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -9,7 +9,7 @@ use crate::fmt;
use crate::intrinsics::{assume, exact_div, unchecked_sub};
use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce};
use crate::marker::{PhantomData, Send, Sized, Sync};
-use crate::mem;
+use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZeroUsize;
use crate::ptr::NonNull;
@@ -91,11 +91,8 @@ impl<'a, T> Iter<'a, T> {
unsafe {
assume(!ptr.is_null());
- let end = if mem::size_of::<T>() == 0 {
- (ptr as *const u8).wrapping_add(slice.len()) as *const T
- } else {
- ptr.add(slice.len())
- };
+ let end =
+ if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
Self { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: PhantomData }
}
@@ -127,6 +124,7 @@ impl<'a, T> Iter<'a, T> {
/// ```
#[must_use]
#[stable(feature = "iter_to_slice", since = "1.4.0")]
+ #[inline]
pub fn as_slice(&self) -> &'a [T] {
self.make_slice()
}
@@ -146,6 +144,7 @@ iterator! {struct Iter -> *const T, &'a T, const, {/* no mut */}, {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
+ #[inline]
fn clone(&self) -> Self {
Iter { ptr: self.ptr, end: self.end, _marker: self._marker }
}
@@ -153,6 +152,7 @@ impl<T> Clone for Iter<'_, T> {
#[stable(feature = "slice_iter_as_ref", since = "1.13.0")]
impl<T> AsRef<[T]> for Iter<'_, T> {
+ #[inline]
fn as_ref(&self) -> &[T] {
self.as_slice()
}
@@ -227,11 +227,8 @@ impl<'a, T> IterMut<'a, T> {
unsafe {
assume(!ptr.is_null());
- let end = if mem::size_of::<T>() == 0 {
- (ptr as *mut u8).wrapping_add(slice.len()) as *mut T
- } else {
- ptr.add(slice.len())
- };
+ let end =
+ if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
Self { ptr: NonNull::new_unchecked(ptr), end, _marker: PhantomData }
}
@@ -303,6 +300,7 @@ impl<'a, T> IterMut<'a, T> {
/// ```
#[must_use]
#[stable(feature = "slice_iter_mut_as_slice", since = "1.53.0")]
+ #[inline]
pub fn as_slice(&self) -> &[T] {
self.make_slice()
}
@@ -351,6 +349,7 @@ impl<'a, T> IterMut<'a, T> {
#[stable(feature = "slice_iter_mut_as_slice", since = "1.53.0")]
impl<T> AsRef<[T]> for IterMut<'_, T> {
+ #[inline]
fn as_ref(&self) -> &[T] {
self.as_slice()
}
@@ -2754,10 +2753,10 @@ impl<'a, T> Iterator for RChunksMut<'a, T> {
None => 0,
};
// SAFETY: This type ensures that self.v is a valid pointer with a correct len.
- // Therefore the bounds check in split_at_mut guarantess the split point is inbounds.
+ // Therefore the bounds check in split_at_mut guarantees the split point is inbounds.
let (head, tail) = unsafe { self.v.split_at_mut(start) };
// SAFETY: This type ensures that self.v is a valid pointer with a correct len.
- // Therefore the bounds check in split_at_mut guarantess the split point is inbounds.
+ // Therefore the bounds check in split_at_mut guarantees the split point is inbounds.
let (nth, _) = unsafe { tail.split_at_mut(end - start) };
self.v = head;
// SAFETY: Nothing else points to or will point to the contents of this slice.
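The `wrapping_byte_add` change above is the long-standing ZST trick in newer clothing: for zero-sized `T`, the iterator's `end` field is not a dereferenceable address but `ptr` advanced by `len` *bytes* with wrapping arithmetic, so the remaining length is recoverable as a byte distance. A minimal sketch (assuming a toolchain where `wrapping_byte_add` is stable, 1.75+):

```
fn main() {
    let slice: &[()] = &[(), (), ()];
    let ptr = slice.as_ptr();
    // For a ZST, `end` encodes the length as a byte offset, not a real address.
    let end = ptr.wrapping_byte_add(slice.len());
    assert_eq!((end as usize).wrapping_sub(ptr as usize), 3);
}
```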
diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs
index c05242222..ce51d48e3 100644
--- a/library/core/src/slice/iter/macros.rs
+++ b/library/core/src/slice/iter/macros.rs
@@ -64,7 +64,7 @@ macro_rules! iterator {
// backwards by `n`. `n` must not exceed `self.len()`.
macro_rules! zst_shrink {
($self: ident, $n: ident) => {
- $self.end = ($self.end as * $raw_mut u8).wrapping_offset(-$n) as * $raw_mut T;
+ $self.end = $self.end.wrapping_byte_sub($n);
}
}
@@ -82,7 +82,7 @@ macro_rules! iterator {
// returning the old start.
// Unsafe because the offset must not exceed `self.len()`.
#[inline(always)]
- unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
+ unsafe fn post_inc_start(&mut self, offset: usize) -> * $raw_mut T {
if mem::size_of::<T>() == 0 {
zst_shrink!(self, offset);
self.ptr.as_ptr()
@@ -90,7 +90,7 @@ macro_rules! iterator {
let old = self.ptr.as_ptr();
// SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
// so this new pointer is inside `self` and thus guaranteed to be non-null.
- self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) };
+ self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().add(offset)) };
old
}
}
@@ -99,15 +99,15 @@ macro_rules! iterator {
// returning the new end.
// Unsafe because the offset must not exceed `self.len()`.
#[inline(always)]
- unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
- if mem::size_of::<T>() == 0 {
+ unsafe fn pre_dec_end(&mut self, offset: usize) -> * $raw_mut T {
+ if T::IS_ZST {
zst_shrink!(self, offset);
self.ptr.as_ptr()
} else {
// SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
// which is guaranteed to not overflow an `isize`. Also, the resulting pointer
// is in bounds of `slice`, which fulfills the other requirements for `offset`.
- self.end = unsafe { self.end.offset(-offset) };
+ self.end = unsafe { self.end.sub(offset) };
self.end
}
}
@@ -140,7 +140,7 @@ macro_rules! iterator {
// since we check if the iterator is empty first.
unsafe {
assume(!self.ptr.as_ptr().is_null());
- if mem::size_of::<T>() != 0 {
+ if !<T>::IS_ZST {
assume(!self.end.is_null());
}
if is_empty!(self) {
@@ -166,7 +166,7 @@ macro_rules! iterator {
fn nth(&mut self, n: usize) -> Option<$elem> {
if n >= len!(self) {
// This iterator is now empty.
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// We have to do it this way as `ptr` may never be 0, but `end`
// could be (due to wrapping).
self.end = self.ptr.as_ptr();
@@ -180,7 +180,7 @@ macro_rules! iterator {
}
// SAFETY: We are in bounds. `post_inc_start` does the right thing even for ZSTs.
unsafe {
- self.post_inc_start(n as isize);
+ self.post_inc_start(n);
Some(next_unchecked!(self))
}
}
@@ -189,7 +189,7 @@ macro_rules! iterator {
fn advance_by(&mut self, n: usize) -> Result<(), usize> {
let advance = cmp::min(len!(self), n);
// SAFETY: By construction, `advance` does not exceed `self.len()`.
- unsafe { self.post_inc_start(advance as isize) };
+ unsafe { self.post_inc_start(advance) };
if advance == n { Ok(()) } else { Err(advance) }
}
@@ -355,7 +355,7 @@ macro_rules! iterator {
// empty first.
unsafe {
assume(!self.ptr.as_ptr().is_null());
- if mem::size_of::<T>() != 0 {
+ if !<T>::IS_ZST {
assume(!self.end.is_null());
}
if is_empty!(self) {
@@ -375,7 +375,7 @@ macro_rules! iterator {
}
// SAFETY: We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
unsafe {
- self.pre_dec_end(n as isize);
+ self.pre_dec_end(n);
Some(next_back_unchecked!(self))
}
}
@@ -384,7 +384,7 @@ macro_rules! iterator {
fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
let advance = cmp::min(len!(self), n);
// SAFETY: By construction, `advance` does not exceed `self.len()`.
- unsafe { self.pre_dec_end(advance as isize) };
+ unsafe { self.pre_dec_end(advance) };
if advance == n { Ok(()) } else { Err(advance) }
}
}
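The signature change from `isize` to `usize` lets these helpers use `ptr::add`/`ptr::sub` instead of `offset` with a sign flip. The operations are equivalent for in-bounds non-negative counts; the unsigned forms document the direction of travel and spare the compiler the sign handling. A quick equivalence check:

```
fn main() {
    let v = [10, 20, 30, 40];
    let p = v.as_ptr();
    // SAFETY: all computed pointers stay within `v`.
    unsafe {
        assert_eq!(*p.add(3), *p.offset(3));        // add(n) == offset(n as isize)
        assert_eq!(*p.add(3).sub(2), *p.offset(1)); // sub(n) == offset(-(n as isize))
    }
}
```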
diff --git a/library/core/src/slice/memchr.rs b/library/core/src/slice/memchr.rs
index dffeaf6a8..c848c2e18 100644
--- a/library/core/src/slice/memchr.rs
+++ b/library/core/src/slice/memchr.rs
@@ -16,35 +16,51 @@ const USIZE_BYTES: usize = mem::size_of::<usize>();
/// bytes where the borrow propagated all the way to the most significant
/// bit."
#[inline]
-fn contains_zero_byte(x: usize) -> bool {
+const fn contains_zero_byte(x: usize) -> bool {
x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0
}
#[cfg(target_pointer_width = "16")]
#[inline]
-fn repeat_byte(b: u8) -> usize {
+const fn repeat_byte(b: u8) -> usize {
(b as usize) << 8 | b as usize
}
#[cfg(not(target_pointer_width = "16"))]
#[inline]
-fn repeat_byte(b: u8) -> usize {
+const fn repeat_byte(b: u8) -> usize {
(b as usize) * (usize::MAX / 255)
}
/// Returns the first index matching the byte `x` in `text`.
#[must_use]
#[inline]
-pub fn memchr(x: u8, text: &[u8]) -> Option<usize> {
- // Fast path for small slices
+pub const fn memchr(x: u8, text: &[u8]) -> Option<usize> {
+ // Fast path for small slices.
if text.len() < 2 * USIZE_BYTES {
- return text.iter().position(|elt| *elt == x);
+ return memchr_naive(x, text);
}
- memchr_general_case(x, text)
+ memchr_aligned(x, text)
}
-fn memchr_general_case(x: u8, text: &[u8]) -> Option<usize> {
+#[inline]
+const fn memchr_naive(x: u8, text: &[u8]) -> Option<usize> {
+ let mut i = 0;
+
+ // FIXME(const-hack): Replace with `text.iter().position(|c| *c == x)`.
+ while i < text.len() {
+ if text[i] == x {
+ return Some(i);
+ }
+
+ i += 1;
+ }
+
+ None
+}
+
+const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
// Scan for a single byte value by reading two `usize` words at a time.
//
// Split `text` in three parts
@@ -59,7 +75,7 @@ fn memchr_general_case(x: u8, text: &[u8]) -> Option<usize> {
if offset > 0 {
offset = cmp::min(offset, len);
- if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {
+ if let Some(index) = memchr_naive(x, &text[..offset]) {
return Some(index);
}
}
@@ -84,7 +100,8 @@ fn memchr_general_case(x: u8, text: &[u8]) -> Option<usize> {
}
// Find the byte after the point the body loop stopped.
- text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i)
+ // FIXME(const-hack): Use `?` instead.
+ if let Some(i) = memchr_naive(x, &text[offset..]) { Some(offset + i) } else { None }
}
/// Returns the last index matching the byte `x` in `text`.
@@ -124,8 +141,8 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
// SAFETY: offset starts at len - suffix.len(), as long as it is greater than
// min_aligned_offset (prefix.len()) the remaining distance is at least 2 * chunk_bytes.
unsafe {
- let u = *(ptr.offset(offset as isize - 2 * chunk_bytes as isize) as *const Chunk);
- let v = *(ptr.offset(offset as isize - chunk_bytes as isize) as *const Chunk);
+ let u = *(ptr.add(offset - 2 * chunk_bytes) as *const Chunk);
+ let v = *(ptr.add(offset - chunk_bytes) as *const Chunk);
// Break if there is a matching byte.
let zu = contains_zero_byte(u ^ repeated_x);
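The point of this rewrite is that `memchr` becomes a `const fn`: the iterator-based scan is replaced by `memchr_naive`, written with a `while` loop because iterator adapters such as `position` are not callable in const contexts yet. The same const-hack pattern as a standalone function (`find_byte` is a hypothetical name, not the libcore symbol):

```
// Iterator adapters aren't `const fn`, so the scan is a manual loop.
const fn find_byte(x: u8, text: &[u8]) -> Option<usize> {
    let mut i = 0;
    while i < text.len() {
        if text[i] == x {
            return Some(i);
        }
        i += 1;
    }
    None
}

// The payoff: usable at compile time.
const COLON: Option<usize> = find_byte(b':', b"core::slice");

fn main() {
    assert_eq!(COLON, Some(4));
    assert_eq!(find_byte(b'!', b"core::slice"), None);
}
```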
diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs
index e6ca6ef82..4f1bb1734 100644
--- a/library/core/src/slice/mod.rs
+++ b/library/core/src/slice/mod.rs
@@ -9,7 +9,7 @@
use crate::cmp::Ordering::{self, Greater, Less};
use crate::intrinsics::{assert_unsafe_precondition, exact_div};
use crate::marker::Copy;
-use crate::mem;
+use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZeroUsize;
use crate::ops::{Bound, FnMut, OneSidedRange, Range, RangeBounds};
use crate::option::Option;
@@ -123,18 +123,11 @@ impl<T> [T] {
#[lang = "slice_len_fn"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_slice_len", since = "1.39.0")]
+ #[rustc_allow_const_fn_unstable(ptr_metadata)]
#[inline]
#[must_use]
- // SAFETY: const sound because we transmute out the length field as a usize (which it must be)
pub const fn len(&self) -> usize {
- // FIXME: Replace with `crate::ptr::metadata(self)` when that is const-stable.
- // As of this writing this causes a "Const-stable functions can only call other
- // const-stable functions" error.
-
- // SAFETY: Accessing the value from the `PtrRepr` union is safe since *const T
- // and PtrComponents<T> have the same memory layouts. Only std can make this
- // guarantee.
- unsafe { crate::ptr::PtrRepr { const_ptr: self }.components.metadata }
+ ptr::metadata(self)
}
/// Returns `true` if the slice has a length of 0.
@@ -656,10 +649,14 @@ impl<T> [T] {
#[unstable(feature = "slice_swap_unchecked", issue = "88539")]
#[rustc_const_unstable(feature = "const_swap", issue = "83163")]
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
- let ptr = self.as_mut_ptr();
+ let this = self;
+ let ptr = this.as_mut_ptr();
// SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
unsafe {
- assert_unsafe_precondition!(a < self.len() && b < self.len());
+ assert_unsafe_precondition!(
+ "slice::swap_unchecked requires that the indices are within the slice",
+ [T](a: usize, b: usize, this: &mut [T]) => a < this.len() && b < this.len()
+ );
ptr::swap(ptr.add(a), ptr.add(b));
}
}
@@ -674,8 +671,9 @@ impl<T> [T] {
/// assert!(v == [3, 2, 1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_reverse", issue = "100784")]
#[inline]
- pub fn reverse(&mut self) {
+ pub const fn reverse(&mut self) {
let half_len = self.len() / 2;
let Range { start, end } = self.as_mut_ptr_range();
@@ -698,9 +696,9 @@ impl<T> [T] {
revswap(front_half, back_half, half_len);
#[inline]
- fn revswap<T>(a: &mut [T], b: &mut [T], n: usize) {
- debug_assert_eq!(a.len(), n);
- debug_assert_eq!(b.len(), n);
+ const fn revswap<T>(a: &mut [T], b: &mut [T], n: usize) {
+ debug_assert!(a.len() == n);
+ debug_assert!(b.len() == n);
// Because this function is first compiled in isolation,
// this check tells LLVM that the indexing below is
@@ -708,8 +706,10 @@ impl<T> [T] {
// lengths of the slices are known -- it's removed.
let (a, b) = (&mut a[..n], &mut b[..n]);
- for i in 0..n {
+ let mut i = 0;
+ while i < n {
mem::swap(&mut a[i], &mut b[n - 1 - i]);
+ i += 1;
}
}
}
@@ -969,9 +969,13 @@ impl<T> [T] {
#[inline]
#[must_use]
pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
+ let this = self;
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
let new_len = unsafe {
- assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+ assert_unsafe_precondition!(
+ "slice::as_chunks_unchecked requires `N != 0` and the slice to split exactly into `N`-element chunks",
+ [T](this: &[T], N: usize) => N != 0 && this.len() % N == 0
+ );
exact_div(self.len(), N)
};
// SAFETY: We cast a slice of `new_len * N` elements into
@@ -1108,10 +1112,14 @@ impl<T> [T] {
#[inline]
#[must_use]
pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
+ let this = &*self;
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
let new_len = unsafe {
- assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
- exact_div(self.len(), N)
+ assert_unsafe_precondition!(
+ "slice::as_chunks_unchecked_mut requires `N != 0` and the slice to split exactly into `N`-element chunks",
+ [T](this: &[T], N: usize) => N != 0 && this.len() % N == 0
+ );
+ exact_div(this.len(), N)
};
// SAFETY: We cast a slice of `new_len * N` elements into
// a slice of `new_len` many `N` elements chunks.
@@ -1538,13 +1546,14 @@ impl<T> [T] {
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_slice_split_at_not_mut", issue = "101158")]
#[inline]
#[track_caller]
#[must_use]
- pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
+ pub const fn split_at(&self, mid: usize) -> (&[T], &[T]) {
assert!(mid <= self.len());
// SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
- // fulfills the requirements of `from_raw_parts_mut`.
+ // fulfills the requirements of `split_at_unchecked`.
unsafe { self.split_at_unchecked(mid) }
}
@@ -1573,7 +1582,8 @@ impl<T> [T] {
#[inline]
#[track_caller]
#[must_use]
- pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
+ #[rustc_const_unstable(feature = "const_slice_split_at_mut", issue = "101804")]
+ pub const fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
assert!(mid <= self.len());
// SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
// fulfills the requirements of `from_raw_parts_mut`.
@@ -1623,11 +1633,19 @@ impl<T> [T] {
/// }
/// ```
#[unstable(feature = "slice_split_at_unchecked", reason = "new API", issue = "76014")]
+ #[rustc_const_unstable(feature = "slice_split_at_unchecked", issue = "76014")]
#[inline]
#[must_use]
- pub unsafe fn split_at_unchecked(&self, mid: usize) -> (&[T], &[T]) {
+ pub const unsafe fn split_at_unchecked(&self, mid: usize) -> (&[T], &[T]) {
+ // HACK: the const function `from_raw_parts` is used to make this
+ // function const; previously the implementation used
+ // `(self.get_unchecked(..mid), self.get_unchecked(mid..))`
+
+ let len = self.len();
+ let ptr = self.as_ptr();
+
// SAFETY: Caller has to check that `0 <= mid <= self.len()`
- unsafe { (self.get_unchecked(..mid), self.get_unchecked(mid..)) }
+ unsafe { (from_raw_parts(ptr, mid), from_raw_parts(ptr.add(mid), len - mid)) }
}
/// Divides one mutable slice into two at an index, without doing bounds checking.
@@ -1664,9 +1682,10 @@ impl<T> [T] {
/// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
/// ```
#[unstable(feature = "slice_split_at_unchecked", reason = "new API", issue = "76014")]
+ #[rustc_const_unstable(feature = "const_slice_split_at_mut", issue = "101804")]
#[inline]
#[must_use]
- pub unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
+ pub const unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
let len = self.len();
let ptr = self.as_mut_ptr();
@@ -1675,7 +1694,10 @@ impl<T> [T] {
// `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
// is fine.
unsafe {
- assert_unsafe_precondition!(mid <= len);
+ assert_unsafe_precondition!(
+ "slice::split_at_mut_unchecked requires the index to be within the slice",
+ (mid: usize, len: usize) => mid <= len
+ );
(from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
}
}
@@ -2059,7 +2081,7 @@ impl<T> [T] {
SplitN::new(self.split(pred), n)
}
- /// Returns an iterator over subslices separated by elements that match
+ /// Returns an iterator over mutable subslices separated by elements that match
/// `pred`, limited to returning at most `n` items. The matched element is
/// not contained in the subslices.
///
@@ -2309,7 +2331,7 @@ impl<T> [T] {
}
/// Binary searches this slice for a given element.
- /// This behaves similary to [`contains`] if this slice is sorted.
+ /// This behaves similarly to [`contains`] if this slice is sorted.
///
/// If the value is found then [`Result::Ok`] is returned, containing the
/// index of the matching element. If there are multiple matches, then any
@@ -2342,6 +2364,28 @@ impl<T> [T] {
/// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
///
+ /// If you want to find that whole *range* of matching items, rather than
+ /// an arbitrary matching one, that can be done using [`partition_point`]:
+ /// ```
+ /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
+ ///
+ /// let low = s.partition_point(|x| x < &1);
+ /// assert_eq!(low, 1);
+ /// let high = s.partition_point(|x| x <= &1);
+ /// assert_eq!(high, 5);
+ /// let r = s.binary_search(&1);
+ /// assert!((low..high).contains(&r.unwrap()));
+ ///
+ /// assert!(s[..low].iter().all(|&x| x < 1));
+ /// assert!(s[low..high].iter().all(|&x| x == 1));
+ /// assert!(s[high..].iter().all(|&x| x > 1));
+ ///
+ /// // For something not found, the "range" of equal items is empty
+ /// assert_eq!(s.partition_point(|x| x < &11), 9);
+ /// assert_eq!(s.partition_point(|x| x <= &11), 9);
+ /// assert_eq!(s.binary_search(&11), Err(9));
+ /// ```
+ ///
/// If you want to insert an item to a sorted vector, while maintaining
/// sort order, consider using [`partition_point`]:
///
@@ -2409,15 +2453,20 @@ impl<T> [T] {
where
F: FnMut(&'a T) -> Ordering,
{
+ // INVARIANTS:
+ // - 0 <= left <= left + size = right <= self.len()
+ // - f returns Less for everything in self[..left]
+ // - f returns Greater for everything in self[right..]
let mut size = self.len();
let mut left = 0;
let mut right = size;
while left < right {
let mid = left + size / 2;
- // SAFETY: the call is made safe by the following invariants:
- // - `mid >= 0`
- // - `mid < size`: `mid` is limited by `[left; right)` bound.
+ // SAFETY: the while condition means `size` is strictly positive, so
+ // `size/2 < size`. Thus `left + size/2 < left + size`, which
+ // coupled with the `left + size <= self.len()` invariant means
+ // we have `left + size/2 < self.len()`, and this is in-bounds.
let cmp = f(unsafe { self.get_unchecked(mid) });
// The reason why we use if/else control flow rather than match
@@ -2435,6 +2484,10 @@ impl<T> [T] {
size = right - left;
}
+
+ // SAFETY: directly true from the overall invariant.
+ // Note that this is `<=`, unlike the assume in the `Ok` path.
+ unsafe { crate::intrinsics::assume(left <= self.len()) };
Err(left)
}
@@ -2525,7 +2578,7 @@ impl<T> [T] {
where
T: Ord,
{
- sort::quicksort(self, |a, b| a.lt(b));
+ sort::quicksort(self, T::lt);
}
/// Sorts the slice with a comparator function, but might not preserve the order of equal
@@ -2628,9 +2681,10 @@ impl<T> [T] {
/// less than or equal to any value at a position `j > index`. Additionally, this reordering is
/// unstable (i.e. any number of equal elements may end up at position `index`), in-place
/// (i.e. does not allocate), and *O*(*n*) worst-case. This function is also known as "kth
- /// element" in other libraries. It returns a triplet of the following values: all elements less
- /// than the one at the given index, the value at the given index, and all elements greater than
- /// the one at the given index.
+ /// element" in other libraries. It returns a triplet of the following from the reordered slice:
+ /// the subslice prior to `index`, the element at `index`, and the subslice after `index`;
+ /// accordingly, the values in those two subslices will respectively all be less-than-or-equal-to
+ /// and greater-than-or-equal-to the value of the element at `index`.
///
/// # Current implementation
///
@@ -2664,8 +2718,7 @@ impl<T> [T] {
where
T: Ord,
{
- let mut f = |a: &T, b: &T| a.lt(b);
- sort::partition_at_index(self, index, &mut f)
+ sort::partition_at_index(self, index, T::lt)
}
/// Reorder the slice with a comparator function such that the element at `index` is at its
@@ -2675,10 +2728,11 @@ impl<T> [T] {
/// less than or equal to any value at a position `j > index` using the comparator function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
/// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
- /// is also known as "kth element" in other libraries. It returns a triplet of the following
- /// values: all elements less than the one at the given index, the value at the given index,
- /// and all elements greater than the one at the given index, using the provided comparator
- /// function.
+ /// is also known as "kth element" in other libraries. It returns a triplet of the following from
+ /// the slice reordered according to the provided comparator function: the subslice prior to
+ /// `index`, the element at `index`, and the subslice after `index`; accordingly, the values in
+ /// those two subslices will respectively all be less-than-or-equal-to and greater-than-or-equal-to
+ /// the value of the element at `index`.
///
/// # Current implementation
///
@@ -2716,8 +2770,7 @@ impl<T> [T] {
where
F: FnMut(&T, &T) -> Ordering,
{
- let mut f = |a: &T, b: &T| compare(a, b) == Less;
- sort::partition_at_index(self, index, &mut f)
+ sort::partition_at_index(self, index, |a: &T, b: &T| compare(a, b) == Less)
}
/// Reorder the slice with a key extraction function such that the element at `index` is at its
@@ -2727,10 +2780,11 @@ impl<T> [T] {
/// less than or equal to any value at a position `j > index` using the key extraction function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
/// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
- /// is also known as "kth element" in other libraries. It returns a triplet of the following
- /// values: all elements less than the one at the given index, the value at the given index, and
- /// all elements greater than the one at the given index, using the provided key extraction
- /// function.
+ /// is also known as "kth element" in other libraries. It returns a triplet of the following from
+ /// the slice reordered according to the provided key extraction function: the subslice prior to
+ /// `index`, the element at `index`, and the subslice after `index`; accordingly, the values in
+ /// those two subslices will respectively all be less-than-or-equal-to and greater-than-or-equal-to
+ /// the value of the element at `index`.
///
/// # Current implementation
///
@@ -2769,8 +2823,7 @@ impl<T> [T] {
F: FnMut(&T) -> K,
K: Ord,
{
- let mut g = |a: &T, b: &T| f(a).lt(&f(b));
- sort::partition_at_index(self, index, &mut g)
+ sort::partition_at_index(self, index, |a: &T, b: &T| f(a).lt(&f(b)))
}
/// Moves all consecutive repeated elements to the end of the slice according to the
@@ -2921,7 +2974,7 @@ impl<T> [T] {
let prev_ptr_write = ptr.add(next_write - 1);
if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
if next_read != next_write {
- let ptr_write = prev_ptr_write.offset(1);
+ let ptr_write = prev_ptr_write.add(1);
mem::swap(&mut *ptr_read, &mut *ptr_write);
}
next_write += 1;
@@ -3444,7 +3497,7 @@ impl<T> [T] {
#[must_use]
pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
// Note that most of this function will be constant-evaluated,
- if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
+ if U::IS_ZST || T::IS_ZST {
// handle ZSTs specially, which is – don't handle them at all.
return (self, &[], &[]);
}
@@ -3505,7 +3558,7 @@ impl<T> [T] {
#[must_use]
pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
// Note that most of this function will be constant-evaluated,
- if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
+ if U::IS_ZST || T::IS_ZST {
// handle ZSTs specially, which is – don't handle them at all.
return (self, &mut [], &mut []);
}
@@ -3518,7 +3571,7 @@ impl<T> [T] {
// alignment targeted for U.
// `crate::ptr::align_offset` is called with a correctly aligned and
// valid pointer `ptr` (it comes from a reference to `self`) and with
- // a size that is a power of two (since it comes from the alignement for U),
+ // a size that is a power of two (since it comes from the alignment for U),
// satisfying its safety constraints.
let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::<U>()) };
if offset > self.len() {
@@ -3761,6 +3814,16 @@ impl<T> [T] {
/// assert!(v[i..].iter().all(|&x| !(x < 5)));
/// ```
///
+ /// If all elements of the slice match the predicate, including if the slice
+ /// is empty, then the length of the slice will be returned:
+ ///
+ /// ```
+ /// let a = [2, 4, 8];
+ /// assert_eq!(a.partition_point(|x| x < &100), a.len());
+ /// let a: [i32; 0] = [];
+ /// assert_eq!(a.partition_point(|x| x < &100), 0);
+ /// ```
+ ///
/// If you want to insert an item to a sorted vector, while maintaining
/// sort order:
///
@@ -4051,7 +4114,7 @@ impl<T, const N: usize> [[T; N]] {
/// ```
#[unstable(feature = "slice_flatten", issue = "95629")]
pub fn flatten(&self) -> &[T] {
- let len = if crate::mem::size_of::<T>() == 0 {
+ let len = if T::IS_ZST {
self.len().checked_mul(N).expect("slice len overflow")
} else {
// SAFETY: `self.len() * N` cannot overflow because `self` is
@@ -4089,7 +4152,7 @@ impl<T, const N: usize> [[T; N]] {
/// ```
#[unstable(feature = "slice_flatten", issue = "95629")]
pub fn flatten_mut(&mut self) -> &mut [T] {
- let len = if crate::mem::size_of::<T>() == 0 {
+ let len = if T::IS_ZST {
self.len().checked_mul(N).expect("slice len overflow")
} else {
// SAFETY: `self.len() * N` cannot overflow because `self` is
@@ -4101,7 +4164,6 @@ impl<T, const N: usize> [[T; N]] {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(test))]
impl [f32] {
/// Sorts the slice of floats.
@@ -4131,7 +4193,6 @@ impl [f32] {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(test))]
impl [f64] {
/// Sorts the slice of floats.
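The most consequential edits in `mod.rs` are the documented loop invariants for `binary_search_by` plus the trailing `assume(left <= self.len())`, which lets LLVM drop a bounds check from code that indexes with the returned `Err` position. The same invariants in user-level form (a sketch of the reasoning, not the libcore code):

```
// Invariants, as in the diff: 0 <= left <= right <= v.len(),
// everything in v[..left] is < target, everything in v[right..] is >= target.
fn lower_bound(v: &[i32], target: i32) -> usize {
    let mut left = 0;
    let mut right = v.len();
    while left < right {
        // `left < right <= v.len()`, so `mid` is in bounds.
        let mid = left + (right - left) / 2;
        if v[mid] < target {
            left = mid + 1; // v[..left] still all < target
        } else {
            right = mid; // v[right..] still all >= target
        }
    }
    left // == right; both invariants now describe the same split point
}

fn main() {
    let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55];
    assert_eq!(lower_bound(&s, 1), 1);
    // Agrees with the `partition_point` formulation used in the new docs.
    assert_eq!(lower_bound(&s, 1), s.partition_point(|&x| x < 1));
}
```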
diff --git a/library/core/src/slice/raw.rs b/library/core/src/slice/raw.rs
index 107e71ab6..052fd34d0 100644
--- a/library/core/src/slice/raw.rs
+++ b/library/core/src/slice/raw.rs
@@ -1,7 +1,9 @@
//! Free functions to create `&[T]` and `&mut [T]`.
use crate::array;
-use crate::intrinsics::{assert_unsafe_precondition, is_aligned_and_not_null};
+use crate::intrinsics::{
+ assert_unsafe_precondition, is_aligned_and_not_null, is_valid_allocation_size,
+};
use crate::ops::Range;
use crate::ptr;
@@ -91,8 +93,9 @@ pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T]
// SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
unsafe {
assert_unsafe_precondition!(
- is_aligned_and_not_null(data)
- && crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize
+ "slice::from_raw_parts requires the pointer to be aligned and non-null, and the total size of the slice not to exceed `isize::MAX`",
+ [T](data: *const T, len: usize) => is_aligned_and_not_null(data)
+ && is_valid_allocation_size::<T>(len)
);
&*ptr::slice_from_raw_parts(data, len)
}
@@ -135,8 +138,9 @@ pub const unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a m
// SAFETY: the caller must uphold the safety contract for `from_raw_parts_mut`.
unsafe {
assert_unsafe_precondition!(
- is_aligned_and_not_null(data)
- && crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize
+ "slice::from_raw_parts_mut requires the pointer to be aligned and non-null, and the total size of the slice not to exceed `isize::MAX`",
+ [T](data: *mut T, len: usize) => is_aligned_and_not_null(data)
+ && is_valid_allocation_size::<T>(len)
);
&mut *ptr::slice_from_raw_parts_mut(data, len)
}
@@ -188,6 +192,10 @@ pub const fn from_mut<T>(s: &mut T) -> &mut [T] {
///
/// Note that a range created from [`slice::as_ptr_range`] fulfills these requirements.
///
+/// # Panics
+///
+/// This function panics if `T` is a Zero-Sized Type (“ZST”).
+///
/// # Caveat
///
/// The lifetime for the returned slice is inferred from its usage. To
@@ -219,9 +227,15 @@ pub const unsafe fn from_ptr_range<'a, T>(range: Range<*const T>) -> &'a [T] {
unsafe { from_raw_parts(range.start, range.end.sub_ptr(range.start)) }
}
-/// Performs the same functionality as [`from_ptr_range`], except that a
+/// Forms a mutable slice from a pointer range.
+///
+/// This is the same functionality as [`from_ptr_range`], except that a
/// mutable slice is returned.
///
+/// This function is useful for interacting with foreign interfaces which
+/// use two pointers to refer to a range of elements in memory, as is
+/// common in C++.
+///
/// # Safety
///
/// Behavior is undefined if any of the following conditions are violated:
@@ -247,6 +261,18 @@ pub const unsafe fn from_ptr_range<'a, T>(range: Range<*const T>) -> &'a [T] {
///
/// Note that a range created from [`slice::as_mut_ptr_range`] fulfills these requirements.
///
+/// # Panics
+///
+/// This function panics if `T` is a Zero-Sized Type (“ZST”).
+///
+/// # Caveat
+///
+/// The lifetime for the returned slice is inferred from its usage. To
+/// prevent accidental misuse, it's suggested to tie the lifetime to whichever
+/// source lifetime is safe in the context, such as by providing a helper
+/// function taking the lifetime of a host value for the slice, or by explicit
+/// annotation.
+///
/// # Examples
///
/// ```
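`from_ptr_range` (still unstable) is described above as the C++-style pointer-pair interface. The same round trip can be written today with stable APIs, which also shows the length calculation the function performs internally:

```
use core::slice;

fn main() {
    let a = [1u32, 2, 3];
    let range = a.as_ptr_range();
    // SAFETY: both pointers derive from the same live slice and
    // `range.start <= range.end`, so `offset_from` and the
    // reconstruction via `from_raw_parts` are valid.
    let len = unsafe { range.end.offset_from(range.start) } as usize;
    let b: &[u32] = unsafe { slice::from_raw_parts(range.start, len) };
    assert_eq!(b, &a);
}
```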
diff --git a/library/core/src/slice/rotate.rs b/library/core/src/slice/rotate.rs
index 4589c6c0f..fa8c238f8 100644
--- a/library/core/src/slice/rotate.rs
+++ b/library/core/src/slice/rotate.rs
@@ -1,5 +1,5 @@
use crate::cmp;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{self, MaybeUninit, SizedTypeProperties};
use crate::ptr;
/// Rotates the range `[mid-left, mid+right)` such that the element at `mid` becomes the first
@@ -63,7 +63,7 @@ use crate::ptr;
/// when `left < right` the swapping happens from the left instead.
pub unsafe fn ptr_rotate<T>(mut left: usize, mut mid: *mut T, mut right: usize) {
type BufType = [usize; 32];
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
return;
}
loop {
diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs
index 6a201834b..87f77b7f2 100644
--- a/library/core/src/slice/sort.rs
+++ b/library/core/src/slice/sort.rs
@@ -7,7 +7,7 @@
//! stable sorting implementation.
use crate::cmp;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{self, MaybeUninit, SizedTypeProperties};
use crate::ptr;
/// When dropped, copies from `src` into `dest`.
@@ -326,8 +326,8 @@ where
unsafe {
// Branchless comparison.
*end_l = i as u8;
- end_l = end_l.offset(!is_less(&*elem, pivot) as isize);
- elem = elem.offset(1);
+ end_l = end_l.add(!is_less(&*elem, pivot) as usize);
+ elem = elem.add(1);
}
}
}
@@ -352,9 +352,9 @@ where
// Plus, `block_r` was asserted to be less than `BLOCK` and `elem` will therefore at most be pointing to the beginning of the slice.
unsafe {
// Branchless comparison.
- elem = elem.offset(-1);
+ elem = elem.sub(1);
*end_r = i as u8;
- end_r = end_r.offset(is_less(&*elem, pivot) as isize);
+ end_r = end_r.add(is_less(&*elem, pivot) as usize);
}
}
}
@@ -365,12 +365,12 @@ where
if count > 0 {
macro_rules! left {
() => {
- l.offset(*start_l as isize)
+ l.add(usize::from(*start_l))
};
}
macro_rules! right {
() => {
- r.offset(-(*start_r as isize) - 1)
+ r.sub(usize::from(*start_r) + 1)
};
}
@@ -398,16 +398,16 @@ where
ptr::copy_nonoverlapping(right!(), left!(), 1);
for _ in 1..count {
- start_l = start_l.offset(1);
+ start_l = start_l.add(1);
ptr::copy_nonoverlapping(left!(), right!(), 1);
- start_r = start_r.offset(1);
+ start_r = start_r.add(1);
ptr::copy_nonoverlapping(right!(), left!(), 1);
}
ptr::copy_nonoverlapping(&tmp, right!(), 1);
mem::forget(tmp);
- start_l = start_l.offset(1);
- start_r = start_r.offset(1);
+ start_l = start_l.add(1);
+ start_r = start_r.add(1);
}
}
@@ -420,7 +420,7 @@ where
// safe. Otherwise, the debug assertions in the `is_done` case guarantee that
// `width(l, r) == block_l + block_r`, namely, that the block sizes have been adjusted to account
// for the smaller number of remaining elements.
- l = unsafe { l.offset(block_l as isize) };
+ l = unsafe { l.add(block_l) };
}
if start_r == end_r {
@@ -428,7 +428,7 @@ where
// SAFETY: Same argument as [block-width-guarantee]. Either this is a full block `2*BLOCK`-wide,
// or `block_r` has been adjusted for the last handful of elements.
- r = unsafe { r.offset(-(block_r as isize)) };
+ r = unsafe { r.sub(block_r) };
}
if is_done {
@@ -457,9 +457,9 @@ where
// - `offsets_l` contains valid offsets into `v` collected during the partitioning of
// the last block, so the `l.offset` calls are valid.
unsafe {
- end_l = end_l.offset(-1);
- ptr::swap(l.offset(*end_l as isize), r.offset(-1));
- r = r.offset(-1);
+ end_l = end_l.sub(1);
+ ptr::swap(l.add(usize::from(*end_l)), r.sub(1));
+ r = r.sub(1);
}
}
width(v.as_mut_ptr(), r)
@@ -470,9 +470,9 @@ where
while start_r < end_r {
// SAFETY: See the reasoning in [remaining-elements-safety].
unsafe {
- end_r = end_r.offset(-1);
- ptr::swap(l, r.offset(-(*end_r as isize) - 1));
- l = l.offset(1);
+ end_r = end_r.sub(1);
+ ptr::swap(l, r.sub(usize::from(*end_r) + 1));
+ l = l.add(1);
}
}
width(v.as_mut_ptr(), l)
@@ -813,7 +813,7 @@ where
F: FnMut(&T, &T) -> bool,
{
// Sorting has no meaningful behavior on zero-sized types.
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
return;
}
@@ -898,7 +898,7 @@ where
panic!("partition_at_index index {} greater than length of slice {}", index, v.len());
}
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// Sorting has no meaningful behavior on zero-sized types. Do nothing.
} else if index == v.len() - 1 {
// Find max element and place it in the last position of the array. We're free to use
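Throughout `sort.rs`, signed `offset` arithmetic with casts and negations becomes `add`/`sub` on unsigned counts, e.g. `usize::from(*start_l)` replacing `*start_l as isize` in the block partition. The underlying branchless idiom, unconditionally writing an offset and advancing the cursor only when the comparison matches, looks like this in safe code (a sketch of the idiom only, not the actual partition):

```
fn collect_less_than(v: &[i32], pivot: i32) -> Vec<u8> {
    let mut offsets = vec![0u8; v.len()];
    let mut end = 0;
    for (i, &x) in v.iter().enumerate() {
        // Always write; the comparison result (0 or 1) decides whether the
        // write "sticks" by advancing the cursor. No branch, no mispredict.
        offsets[end] = i as u8;
        end += (x < pivot) as usize;
    }
    offsets.truncate(end);
    offsets
}

fn main() {
    assert_eq!(collect_less_than(&[5, 1, 9, 2, 7], 5), vec![1, 3]);
}
```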