| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-11 08:27:49 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-11 08:27:49 +0000 |
| commit | ace9429bb58fd418f0c81d4c2835699bddf6bde6 (patch) | |
| tree | b2d64bc10158fdd5497876388cd68142ca374ed3 /rust/alloc/vec | |
| parent | Initial commit. (diff) | |
| download | linux-ace9429bb58fd418f0c81d4c2835699bddf6bde6.tar.xz, linux-ace9429bb58fd418f0c81d4c2835699bddf6bde6.zip | |
Adding upstream version 6.6.15. (upstream/6.6.15)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'rust/alloc/vec')

| mode | file | insertions |
|---|---|---|
| -rw-r--r-- | rust/alloc/vec/drain.rs | 255 |
| -rw-r--r-- | rust/alloc/vec/drain_filter.rs | 199 |
| -rw-r--r-- | rust/alloc/vec/into_iter.rs | 448 |
| -rw-r--r-- | rust/alloc/vec/is_zero.rs | 204 |
| -rw-r--r-- | rust/alloc/vec/mod.rs | 3563 |
| -rw-r--r-- | rust/alloc/vec/partial_eq.rs | 49 |
| -rw-r--r-- | rust/alloc/vec/set_len_on_drop.rs | 35 |
| -rw-r--r-- | rust/alloc/vec/spec_extend.rs | 119 |

8 files changed, 4872 insertions, 0 deletions
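Before the full diff, here is a minimal usage sketch of the draining iterator that the imported `rust/alloc/vec/drain.rs` implements. It is written against the standard library `Vec`, which this vendored copy mirrors, and is an illustration only, not part of the commit.

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4, 5];

    // `drain` removes the given range and yields the removed elements;
    // when the `Drain` iterator is dropped, the untouched tail is shifted
    // back so `v` stays contiguous (the job of `Drain`'s `Drop` impl below).
    let removed: Vec<i32> = v.drain(1..4).collect();

    assert_eq!(removed, [2, 3, 4]);
    assert_eq!(v, [1, 5]);
}
```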
diff --git a/rust/alloc/vec/drain.rs b/rust/alloc/vec/drain.rs new file mode 100644 index 0000000000..78177a9e2a --- /dev/null +++ b/rust/alloc/vec/drain.rs @@ -0,0 +1,255 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use crate::alloc::{Allocator, Global}; +use core::fmt; +use core::iter::{FusedIterator, TrustedLen}; +use core::mem::{self, ManuallyDrop, SizedTypeProperties}; +use core::ptr::{self, NonNull}; +use core::slice::{self}; + +use super::Vec; + +/// A draining iterator for `Vec<T>`. +/// +/// This `struct` is created by [`Vec::drain`]. +/// See its documentation for more. +/// +/// # Example +/// +/// ``` +/// let mut v = vec![0, 1, 2]; +/// let iter: std::vec::Drain<'_, _> = v.drain(..); +/// ``` +#[stable(feature = "drain", since = "1.6.0")] +pub struct Drain< + 'a, + T: 'a, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global, +> { + /// Index of tail to preserve + pub(super) tail_start: usize, + /// Length of tail + pub(super) tail_len: usize, + /// Current remaining range to remove + pub(super) iter: slice::Iter<'a, T>, + pub(super) vec: NonNull<Vec<T, A>>, +} + +#[stable(feature = "collection_debug", since = "1.17.0")] +impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("Drain").field(&self.iter.as_slice()).finish() + } +} + +impl<'a, T, A: Allocator> Drain<'a, T, A> { + /// Returns the remaining items of this iterator as a slice. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec!['a', 'b', 'c']; + /// let mut drain = vec.drain(..); + /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']); + /// let _ = drain.next().unwrap(); + /// assert_eq!(drain.as_slice(), &['b', 'c']); + /// ``` + #[must_use] + #[stable(feature = "vec_drain_as_slice", since = "1.46.0")] + pub fn as_slice(&self) -> &[T] { + self.iter.as_slice() + } + + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[must_use] + #[inline] + pub fn allocator(&self) -> &A { + unsafe { self.vec.as_ref().allocator() } + } + + /// Keep unyielded elements in the source `Vec`. + /// + /// # Examples + /// + /// ``` + /// #![feature(drain_keep_rest)] + /// + /// let mut vec = vec!['a', 'b', 'c']; + /// let mut drain = vec.drain(..); + /// + /// assert_eq!(drain.next().unwrap(), 'a'); + /// + /// // This call keeps 'b' and 'c' in the vec. + /// drain.keep_rest(); + /// + /// // If we wouldn't call `keep_rest()`, + /// // `vec` would be empty. + /// assert_eq!(vec, ['b', 'c']); + /// ``` + #[unstable(feature = "drain_keep_rest", issue = "101122")] + pub fn keep_rest(self) { + // At this moment layout looks like this: + // + // [head] [yielded by next] [unyielded] [yielded by next_back] [tail] + // ^-- start \_________/-- unyielded_len \____/-- self.tail_len + // ^-- unyielded_ptr ^-- tail + // + // Normally `Drop` impl would drop [unyielded] and then move [tail] to the `start`. + // Here we want to + // 1. Move [unyielded] to `start` + // 2. Move [tail] to a new start at `start + len(unyielded)` + // 3. Update length of the original vec to `len(head) + len(unyielded) + len(tail)` + // a. In case of ZST, this is the only thing we want to do + // 4. 
Do *not* drop self, as everything is put in a consistent state already, there is nothing to do + let mut this = ManuallyDrop::new(self); + + unsafe { + let source_vec = this.vec.as_mut(); + + let start = source_vec.len(); + let tail = this.tail_start; + + let unyielded_len = this.iter.len(); + let unyielded_ptr = this.iter.as_slice().as_ptr(); + + // ZSTs have no identity, so we don't need to move them around. + if !T::IS_ZST { + let start_ptr = source_vec.as_mut_ptr().add(start); + + // memmove back unyielded elements + if unyielded_ptr != start_ptr { + let src = unyielded_ptr; + let dst = start_ptr; + + ptr::copy(src, dst, unyielded_len); + } + + // memmove back untouched tail + if tail != (start + unyielded_len) { + let src = source_vec.as_ptr().add(tail); + let dst = start_ptr.add(unyielded_len); + ptr::copy(src, dst, this.tail_len); + } + } + + source_vec.set_len(start + unyielded_len + this.tail_len); + } + } +} + +#[stable(feature = "vec_drain_as_slice", since = "1.46.0")] +impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> { + fn as_ref(&self) -> &[T] { + self.as_slice() + } +} + +#[stable(feature = "drain", since = "1.6.0")] +unsafe impl<T: Sync, A: Sync + Allocator> Sync for Drain<'_, T, A> {} +#[stable(feature = "drain", since = "1.6.0")] +unsafe impl<T: Send, A: Send + Allocator> Send for Drain<'_, T, A> {} + +#[stable(feature = "drain", since = "1.6.0")] +impl<T, A: Allocator> Iterator for Drain<'_, T, A> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option<T> { + self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) }) + } + + fn size_hint(&self) -> (usize, Option<usize>) { + self.iter.size_hint() + } +} + +#[stable(feature = "drain", since = "1.6.0")] +impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> { + #[inline] + fn next_back(&mut self) -> Option<T> { + self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) }) + } +} + +#[stable(feature = "drain", since = "1.6.0")] +impl<T, A: Allocator> Drop for Drain<'_, T, A> { + fn drop(&mut self) { + /// Moves back the un-`Drain`ed elements to restore the original `Vec`. + struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>); + + impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> { + fn drop(&mut self) { + if self.0.tail_len > 0 { + unsafe { + let source_vec = self.0.vec.as_mut(); + // memmove back untouched tail, update to new length + let start = source_vec.len(); + let tail = self.0.tail_start; + if tail != start { + let src = source_vec.as_ptr().add(tail); + let dst = source_vec.as_mut_ptr().add(start); + ptr::copy(src, dst, self.0.tail_len); + } + source_vec.set_len(start + self.0.tail_len); + } + } + } + } + + let iter = mem::take(&mut self.iter); + let drop_len = iter.len(); + + let mut vec = self.vec; + + if T::IS_ZST { + // ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount. + // this can be achieved by manipulating the Vec length instead of moving values out from `iter`. 
+ unsafe { + let vec = vec.as_mut(); + let old_len = vec.len(); + vec.set_len(old_len + drop_len + self.tail_len); + vec.truncate(old_len + self.tail_len); + } + + return; + } + + // ensure elements are moved back into their appropriate places, even when drop_in_place panics + let _guard = DropGuard(self); + + if drop_len == 0 { + return; + } + + // as_slice() must only be called when iter.len() is > 0 because + // it also gets touched by vec::Splice which may turn it into a dangling pointer + // which would make it and the vec pointer point to different allocations which would + // lead to invalid pointer arithmetic below. + let drop_ptr = iter.as_slice().as_ptr(); + + unsafe { + // drop_ptr comes from a slice::Iter which only gives us a &[T] but for drop_in_place + // a pointer with mutable provenance is necessary. Therefore we must reconstruct + // it from the original vec but also avoid creating a &mut to the front since that could + // invalidate raw pointers to it which some unsafe code might rely on. + let vec_ptr = vec.as_mut().as_mut_ptr(); + let drop_offset = drop_ptr.sub_ptr(vec_ptr); + let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len); + ptr::drop_in_place(to_drop); + } + } +} + +#[stable(feature = "drain", since = "1.6.0")] +impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> { + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} + +#[unstable(feature = "trusted_len", issue = "37572")] +unsafe impl<T, A: Allocator> TrustedLen for Drain<'_, T, A> {} + +#[stable(feature = "fused", since = "1.26.0")] +impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {} diff --git a/rust/alloc/vec/drain_filter.rs b/rust/alloc/vec/drain_filter.rs new file mode 100644 index 0000000000..09efff090e --- /dev/null +++ b/rust/alloc/vec/drain_filter.rs @@ -0,0 +1,199 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use crate::alloc::{Allocator, Global}; +use core::mem::{ManuallyDrop, SizedTypeProperties}; +use core::ptr; +use core::slice; + +use super::Vec; + +/// An iterator which uses a closure to determine if an element should be removed. +/// +/// This struct is created by [`Vec::drain_filter`]. +/// See its documentation for more. +/// +/// # Example +/// +/// ``` +/// #![feature(drain_filter)] +/// +/// let mut v = vec![0, 1, 2]; +/// let iter: std::vec::DrainFilter<'_, _, _> = v.drain_filter(|x| *x % 2 == 0); +/// ``` +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +#[derive(Debug)] +pub struct DrainFilter< + 'a, + T, + F, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> where + F: FnMut(&mut T) -> bool, +{ + pub(super) vec: &'a mut Vec<T, A>, + /// The index of the item that will be inspected by the next call to `next`. + pub(super) idx: usize, + /// The number of items that have been drained (removed) thus far. + pub(super) del: usize, + /// The original length of `vec` prior to draining. + pub(super) old_len: usize, + /// The filter test predicate. + pub(super) pred: F, + /// A flag that indicates a panic has occurred in the filter test predicate. + /// This is used as a hint in the drop implementation to prevent consumption + /// of the remainder of the `DrainFilter`. Any unprocessed items will be + /// backshifted in the `vec`, but no further items will be dropped or + /// tested by the filter predicate. 
+ pub(super) panic_flag: bool, +} + +impl<T, F, A: Allocator> DrainFilter<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + self.vec.allocator() + } + + /// Keep unyielded elements in the source `Vec`. + /// + /// # Examples + /// + /// ``` + /// #![feature(drain_filter)] + /// #![feature(drain_keep_rest)] + /// + /// let mut vec = vec!['a', 'b', 'c']; + /// let mut drain = vec.drain_filter(|_| true); + /// + /// assert_eq!(drain.next().unwrap(), 'a'); + /// + /// // This call keeps 'b' and 'c' in the vec. + /// drain.keep_rest(); + /// + /// // If we wouldn't call `keep_rest()`, + /// // `vec` would be empty. + /// assert_eq!(vec, ['b', 'c']); + /// ``` + #[unstable(feature = "drain_keep_rest", issue = "101122")] + pub fn keep_rest(self) { + // At this moment layout looks like this: + // + // _____________________/-- old_len + // / \ + // [kept] [yielded] [tail] + // \_______/ ^-- idx + // \-- del + // + // Normally `Drop` impl would drop [tail] (via .for_each(drop), ie still calling `pred`) + // + // 1. Move [tail] after [kept] + // 2. Update length of the original vec to `old_len - del` + // a. In case of ZST, this is the only thing we want to do + // 3. Do *not* drop self, as everything is put in a consistent state already, there is nothing to do + let mut this = ManuallyDrop::new(self); + + unsafe { + // ZSTs have no identity, so we don't need to move them around. + if !T::IS_ZST && this.idx < this.old_len && this.del > 0 { + let ptr = this.vec.as_mut_ptr(); + let src = ptr.add(this.idx); + let dst = src.sub(this.del); + let tail_len = this.old_len - this.idx; + src.copy_to(dst, tail_len); + } + + let new_len = this.old_len - this.del; + this.vec.set_len(new_len); + } + } +} + +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + type Item = T; + + fn next(&mut self) -> Option<T> { + unsafe { + while self.idx < self.old_len { + let i = self.idx; + let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len); + self.panic_flag = true; + let drained = (self.pred)(&mut v[i]); + self.panic_flag = false; + // Update the index *after* the predicate is called. If the index + // is updated prior and the predicate panics, the element at this + // index would be leaked. + self.idx += 1; + if drained { + self.del += 1; + return Some(ptr::read(&v[i])); + } else if self.del > 0 { + let del = self.del; + let src: *const T = &v[i]; + let dst: *mut T = &mut v[i - del]; + ptr::copy_nonoverlapping(src, dst, 1); + } + } + None + } + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (0, Some(self.old_len - self.idx)) + } +} + +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A> +where + F: FnMut(&mut T) -> bool, +{ + fn drop(&mut self) { + struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator> + where + F: FnMut(&mut T) -> bool, + { + drain: &'b mut DrainFilter<'a, T, F, A>, + } + + impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A> + where + F: FnMut(&mut T) -> bool, + { + fn drop(&mut self) { + unsafe { + if self.drain.idx < self.drain.old_len && self.drain.del > 0 { + // This is a pretty messed up state, and there isn't really an + // obviously right thing to do. 
We don't want to keep trying + // to execute `pred`, so we just backshift all the unprocessed + // elements and tell the vec that they still exist. The backshift + // is required to prevent a double-drop of the last successfully + // drained item prior to a panic in the predicate. + let ptr = self.drain.vec.as_mut_ptr(); + let src = ptr.add(self.drain.idx); + let dst = src.sub(self.drain.del); + let tail_len = self.drain.old_len - self.drain.idx; + src.copy_to(dst, tail_len); + } + self.drain.vec.set_len(self.drain.old_len - self.drain.del); + } + } + } + + let backshift = BackshiftOnDrop { drain: self }; + + // Attempt to consume any remaining elements if the filter predicate + // has not yet panicked. We'll backshift any remaining elements + // whether we've already panicked or if the consumption here panics. + if !backshift.drain.panic_flag { + backshift.drain.for_each(drop); + } + } +} diff --git a/rust/alloc/vec/into_iter.rs b/rust/alloc/vec/into_iter.rs new file mode 100644 index 0000000000..aac0ec16ae --- /dev/null +++ b/rust/alloc/vec/into_iter.rs @@ -0,0 +1,448 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +#[cfg(not(no_global_oom_handling))] +use super::AsVecIntoIter; +use crate::alloc::{Allocator, Global}; +#[cfg(not(no_global_oom_handling))] +use crate::collections::VecDeque; +use crate::raw_vec::RawVec; +use core::array; +use core::fmt; +use core::iter::{ + FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce, +}; +use core::marker::PhantomData; +use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; +use core::num::NonZeroUsize; +#[cfg(not(no_global_oom_handling))] +use core::ops::Deref; +use core::ptr::{self, NonNull}; +use core::slice::{self}; + +/// An iterator that moves out of a vector. +/// +/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec) +/// (provided by the [`IntoIterator`] trait). +/// +/// # Example +/// +/// ``` +/// let v = vec![0, 1, 2]; +/// let iter: std::vec::IntoIter<_> = v.into_iter(); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_insignificant_dtor] +pub struct IntoIter< + T, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { + pub(super) buf: NonNull<T>, + pub(super) phantom: PhantomData<T>, + pub(super) cap: usize, + // the drop impl reconstructs a RawVec from buf, cap and alloc + // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop + pub(super) alloc: ManuallyDrop<A>, + pub(super) ptr: *const T, + pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that + // ptr == end is a quick test for the Iterator being empty, that works + // for both ZST and non-ZST. +} + +#[stable(feature = "vec_intoiter_debug", since = "1.13.0")] +impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("IntoIter").field(&self.as_slice()).finish() + } +} + +impl<T, A: Allocator> IntoIter<T, A> { + /// Returns the remaining items of this iterator as a slice. 
+ /// + /// # Examples + /// + /// ``` + /// let vec = vec!['a', 'b', 'c']; + /// let mut into_iter = vec.into_iter(); + /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); + /// let _ = into_iter.next().unwrap(); + /// assert_eq!(into_iter.as_slice(), &['b', 'c']); + /// ``` + #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] + pub fn as_slice(&self) -> &[T] { + unsafe { slice::from_raw_parts(self.ptr, self.len()) } + } + + /// Returns the remaining items of this iterator as a mutable slice. + /// + /// # Examples + /// + /// ``` + /// let vec = vec!['a', 'b', 'c']; + /// let mut into_iter = vec.into_iter(); + /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); + /// into_iter.as_mut_slice()[2] = 'z'; + /// assert_eq!(into_iter.next().unwrap(), 'a'); + /// assert_eq!(into_iter.next().unwrap(), 'b'); + /// assert_eq!(into_iter.next().unwrap(), 'z'); + /// ``` + #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")] + pub fn as_mut_slice(&mut self) -> &mut [T] { + unsafe { &mut *self.as_raw_mut_slice() } + } + + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + &self.alloc + } + + fn as_raw_mut_slice(&mut self) -> *mut [T] { + ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len()) + } + + /// Drops remaining elements and relinquishes the backing allocation. + /// This method guarantees it won't panic before relinquishing + /// the backing allocation. + /// + /// This is roughly equivalent to the following, but more efficient + /// + /// ``` + /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter(); + /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter()); + /// (&mut into_iter).for_each(drop); + /// std::mem::forget(into_iter); + /// ``` + /// + /// This method is used by in-place iteration, refer to the vec::in_place_collect + /// documentation for an overview. + #[cfg(not(no_global_oom_handling))] + pub(super) fn forget_allocation_drop_remaining(&mut self) { + let remaining = self.as_raw_mut_slice(); + + // overwrite the individual fields instead of creating a new + // struct and then overwriting &mut self. + // this creates less assembly + self.cap = 0; + self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) }; + self.ptr = self.buf.as_ptr(); + self.end = self.buf.as_ptr(); + + // Dropping the remaining elements can panic, so this needs to be + // done only after updating the other fields. + unsafe { + ptr::drop_in_place(remaining); + } + } + + /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed. + pub(crate) fn forget_remaining_elements(&mut self) { + // For th ZST case, it is crucial that we mutate `end` here, not `ptr`. + // `ptr` must stay aligned, while `end` may be unaligned. + self.end = self.ptr; + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> { + // Keep our `Drop` impl from dropping the elements and the allocator + let mut this = ManuallyDrop::new(self); + + // SAFETY: This allocation originally came from a `Vec`, so it passes + // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`, + // so the `sub_ptr`s below cannot wrap, and will produce a well-formed + // range. `end` ≤ `buf + cap`, so the range will be in-bounds. + // Taking `alloc` is ok because nothing else is going to look at it, + // since our `Drop` impl isn't going to run so there's no more code. 
+ unsafe { + let buf = this.buf.as_ptr(); + let initialized = if T::IS_ZST { + // All the pointers are the same for ZSTs, so it's fine to + // say that they're all at the beginning of the "allocation". + 0..this.len() + } else { + this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf) + }; + let cap = this.cap; + let alloc = ManuallyDrop::take(&mut this.alloc); + VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc) + } + } +} + +#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")] +impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> { + fn as_ref(&self) -> &[T] { + self.as_slice() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {} +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> Iterator for IntoIter<T, A> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option<T> { + if self.ptr == self.end { + None + } else if T::IS_ZST { + // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by + // reducing the `end`. + self.end = self.end.wrapping_byte_sub(1); + + // Make up a value of this ZST. + Some(unsafe { mem::zeroed() }) + } else { + let old = self.ptr; + self.ptr = unsafe { self.ptr.add(1) }; + + Some(unsafe { ptr::read(old) }) + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option<usize>) { + let exact = if T::IS_ZST { + self.end.addr().wrapping_sub(self.ptr.addr()) + } else { + unsafe { self.end.sub_ptr(self.ptr) } + }; + (exact, Some(exact)) + } + + #[inline] + fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> { + let step_size = self.len().min(n); + let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size); + if T::IS_ZST { + // See `next` for why we sub `end` here. + self.end = self.end.wrapping_byte_sub(step_size); + } else { + // SAFETY: the min() above ensures that step_size is in bounds + self.ptr = unsafe { self.ptr.add(step_size) }; + } + // SAFETY: the min() above ensures that step_size is in bounds + unsafe { + ptr::drop_in_place(to_drop); + } + NonZeroUsize::new(n - step_size).map_or(Ok(()), Err) + } + + #[inline] + fn count(self) -> usize { + self.len() + } + + #[inline] + fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> { + let mut raw_ary = MaybeUninit::uninit_array(); + + let len = self.len(); + + if T::IS_ZST { + if len < N { + self.forget_remaining_elements(); + // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct + return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) }); + } + + self.end = self.end.wrapping_byte_sub(N); + // Safety: ditto + return Ok(unsafe { raw_ary.transpose().assume_init() }); + } + + if len < N { + // Safety: `len` indicates that this many elements are available and we just checked that + // it fits into the array. + unsafe { + ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len); + self.forget_remaining_elements(); + return Err(array::IntoIter::new_unchecked(raw_ary, 0..len)); + } + } + + // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize + // the array. 
+ return unsafe { + ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N); + self.ptr = self.ptr.add(N); + Ok(raw_ary.transpose().assume_init()) + }; + } + + unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item + where + Self: TrustedRandomAccessNoCoerce, + { + // SAFETY: the caller must guarantee that `i` is in bounds of the + // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)` + // is guaranteed to pointer to an element of the `Vec<T>` and + // thus guaranteed to be valid to dereference. + // + // Also note the implementation of `Self: TrustedRandomAccess` requires + // that `T: Copy` so reading elements from the buffer doesn't invalidate + // them for `Drop`. + unsafe { + if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> { + #[inline] + fn next_back(&mut self) -> Option<T> { + if self.end == self.ptr { + None + } else if T::IS_ZST { + // See above for why 'ptr.offset' isn't used + self.end = self.end.wrapping_byte_sub(1); + + // Make up a value of this ZST. + Some(unsafe { mem::zeroed() }) + } else { + self.end = unsafe { self.end.sub(1) }; + + Some(unsafe { ptr::read(self.end) }) + } + } + + #[inline] + fn advance_back_by(&mut self, n: usize) -> Result<(), NonZeroUsize> { + let step_size = self.len().min(n); + if T::IS_ZST { + // SAFETY: same as for advance_by() + self.end = self.end.wrapping_byte_sub(step_size); + } else { + // SAFETY: same as for advance_by() + self.end = unsafe { self.end.sub(step_size) }; + } + let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size); + // SAFETY: same as for advance_by() + unsafe { + ptr::drop_in_place(to_drop); + } + NonZeroUsize::new(n - step_size).map_or(Ok(()), Err) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> { + fn is_empty(&self) -> bool { + self.ptr == self.end + } +} + +#[stable(feature = "fused", since = "1.26.0")] +impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {} + +#[unstable(feature = "trusted_len", issue = "37572")] +unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {} + +#[stable(feature = "default_iters", since = "1.70.0")] +impl<T, A> Default for IntoIter<T, A> +where + A: Allocator + Default, +{ + /// Creates an empty `vec::IntoIter`. 
+ /// + /// ``` + /// # use std::vec; + /// let iter: vec::IntoIter<u8> = Default::default(); + /// assert_eq!(iter.len(), 0); + /// assert_eq!(iter.as_slice(), &[]); + /// ``` + fn default() -> Self { + super::Vec::new_in(Default::default()).into_iter() + } +} + +#[doc(hidden)] +#[unstable(issue = "none", feature = "std_internals")] +#[rustc_unsafe_specialization_marker] +pub trait NonDrop {} + +// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr +// and thus we can't implement drop-handling +#[unstable(issue = "none", feature = "std_internals")] +impl<T: Copy> NonDrop for T {} + +#[doc(hidden)] +#[unstable(issue = "none", feature = "std_internals")] +// TrustedRandomAccess (without NoCoerce) must not be implemented because +// subtypes/supertypes of `T` might not be `NonDrop` +unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A> +where + T: NonDrop, +{ + const MAY_HAVE_SIDE_EFFECT: bool = false; +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "vec_into_iter_clone", since = "1.8.0")] +impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> { + #[cfg(not(test))] + fn clone(&self) -> Self { + self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter() + } + #[cfg(test)] + fn clone(&self) -> Self { + crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> { + fn drop(&mut self) { + struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>); + + impl<T, A: Allocator> Drop for DropGuard<'_, T, A> { + fn drop(&mut self) { + unsafe { + // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec + let alloc = ManuallyDrop::take(&mut self.0.alloc); + // RawVec handles deallocation + let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc); + } + } + } + + let guard = DropGuard(self); + // destroy the remaining elements + unsafe { + ptr::drop_in_place(guard.0.as_raw_mut_slice()); + } + // now `guard` will be dropped and do the rest + } +} + +// In addition to the SAFETY invariants of the following three unsafe traits +// also refer to the vec::in_place_collect module documentation to get an overview +#[unstable(issue = "none", feature = "inplace_iteration")] +#[doc(hidden)] +unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {} + +#[unstable(issue = "none", feature = "inplace_iteration")] +#[doc(hidden)] +unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> { + type Source = Self; + + #[inline] + unsafe fn as_inner(&mut self) -> &mut Self::Source { + self + } +} + +#[cfg(not(no_global_oom_handling))] +unsafe impl<T> AsVecIntoIter for IntoIter<T> { + type Item = T; + + fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> { + self + } +} diff --git a/rust/alloc/vec/is_zero.rs b/rust/alloc/vec/is_zero.rs new file mode 100644 index 0000000000..d928dcf90e --- /dev/null +++ b/rust/alloc/vec/is_zero.rs @@ -0,0 +1,204 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use core::num::{Saturating, Wrapping}; + +use crate::boxed::Box; + +#[rustc_specialization_trait] +pub(super) unsafe trait IsZero { + /// Whether this value's representation is all zeros, + /// or can be represented with all zeroes. + fn is_zero(&self) -> bool; +} + +macro_rules! 
impl_is_zero { + ($t:ty, $is_zero:expr) => { + unsafe impl IsZero for $t { + #[inline] + fn is_zero(&self) -> bool { + $is_zero(*self) + } + } + }; +} + +impl_is_zero!(i8, |x| x == 0); // It is needed to impl for arrays and tuples of i8. +impl_is_zero!(i16, |x| x == 0); +impl_is_zero!(i32, |x| x == 0); +impl_is_zero!(i64, |x| x == 0); +impl_is_zero!(i128, |x| x == 0); +impl_is_zero!(isize, |x| x == 0); + +impl_is_zero!(u8, |x| x == 0); // It is needed to impl for arrays and tuples of u8. +impl_is_zero!(u16, |x| x == 0); +impl_is_zero!(u32, |x| x == 0); +impl_is_zero!(u64, |x| x == 0); +impl_is_zero!(u128, |x| x == 0); +impl_is_zero!(usize, |x| x == 0); + +impl_is_zero!(bool, |x| x == false); +impl_is_zero!(char, |x| x == '\0'); + +impl_is_zero!(f32, |x: f32| x.to_bits() == 0); +impl_is_zero!(f64, |x: f64| x.to_bits() == 0); + +unsafe impl<T> IsZero for *const T { + #[inline] + fn is_zero(&self) -> bool { + (*self).is_null() + } +} + +unsafe impl<T> IsZero for *mut T { + #[inline] + fn is_zero(&self) -> bool { + (*self).is_null() + } +} + +unsafe impl<T: IsZero, const N: usize> IsZero for [T; N] { + #[inline] + fn is_zero(&self) -> bool { + // Because this is generated as a runtime check, it's not obvious that + // it's worth doing if the array is really long. The threshold here + // is largely arbitrary, but was picked because as of 2022-07-01 LLVM + // fails to const-fold the check in `vec![[1; 32]; n]` + // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022 + // Feel free to tweak if you have better evidence. + + N <= 16 && self.iter().all(IsZero::is_zero) + } +} + +// This is recursive macro. +macro_rules! impl_for_tuples { + // Stopper + () => { + // No use for implementing for empty tuple because it is ZST. + }; + ($first_arg:ident $(,$rest:ident)*) => { + unsafe impl <$first_arg: IsZero, $($rest: IsZero,)*> IsZero for ($first_arg, $($rest,)*){ + #[inline] + fn is_zero(&self) -> bool{ + // Destructure tuple to N references + // Rust allows to hide generic params by local variable names. + #[allow(non_snake_case)] + let ($first_arg, $($rest,)*) = self; + + $first_arg.is_zero() + $( && $rest.is_zero() )* + } + } + + impl_for_tuples!($($rest),*); + } +} + +impl_for_tuples!(A, B, C, D, E, F, G, H); + +// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null. +// For fat pointers, the bytes that would be the pointer metadata in the `Some` +// variant are padding in the `None` variant, so ignoring them and +// zero-initializing instead is ok. +// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of +// `SpecFromElem`. + +unsafe impl<T: ?Sized> IsZero for Option<&T> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} + +unsafe impl<T: ?Sized> IsZero for Option<Box<T>> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } +} + +// `Option<num::NonZeroU32>` and similar have a representation guarantee that +// they're the same size as the corresponding `u32` type, as well as a guarantee +// that transmuting between `NonZeroU32` and `Option<num::NonZeroU32>` works. +// While the documentation officially makes it UB to transmute from `None`, +// we're the standard library so we can make extra inferences, and we know that +// the only niche available to represent `None` is the one that's all zeros. + +macro_rules! 
impl_is_zero_option_of_nonzero { + ($($t:ident,)+) => {$( + unsafe impl IsZero for Option<core::num::$t> { + #[inline] + fn is_zero(&self) -> bool { + self.is_none() + } + } + )+}; +} + +impl_is_zero_option_of_nonzero!( + NonZeroU8, + NonZeroU16, + NonZeroU32, + NonZeroU64, + NonZeroU128, + NonZeroI8, + NonZeroI16, + NonZeroI32, + NonZeroI64, + NonZeroI128, + NonZeroUsize, + NonZeroIsize, +); + +macro_rules! impl_is_zero_option_of_num { + ($($t:ty,)+) => {$( + unsafe impl IsZero for Option<$t> { + #[inline] + fn is_zero(&self) -> bool { + const { + let none: Self = unsafe { core::mem::MaybeUninit::zeroed().assume_init() }; + assert!(none.is_none()); + } + self.is_none() + } + } + )+}; +} + +impl_is_zero_option_of_num!(u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, usize, isize,); + +unsafe impl<T: IsZero> IsZero for Wrapping<T> { + #[inline] + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} + +unsafe impl<T: IsZero> IsZero for Saturating<T> { + #[inline] + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} + +macro_rules! impl_for_optional_bool { + ($($t:ty,)+) => {$( + unsafe impl IsZero for $t { + #[inline] + fn is_zero(&self) -> bool { + // SAFETY: This is *not* a stable layout guarantee, but + // inside `core` we're allowed to rely on the current rustc + // behaviour that options of bools will be one byte with + // no padding, so long as they're nested less than 254 deep. + let raw: u8 = unsafe { core::mem::transmute(*self) }; + raw == 0 + } + } + )+}; +} +impl_for_optional_bool! { + Option<bool>, + Option<Option<bool>>, + Option<Option<Option<bool>>>, + // Could go further, but not worth the metadata overhead +} diff --git a/rust/alloc/vec/mod.rs b/rust/alloc/vec/mod.rs new file mode 100644 index 0000000000..05c70de022 --- /dev/null +++ b/rust/alloc/vec/mod.rs @@ -0,0 +1,3563 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +//! A contiguous growable array type with heap-allocated contents, written +//! `Vec<T>`. +//! +//! Vectors have *O*(1) indexing, amortized *O*(1) push (to the end) and +//! *O*(1) pop (from the end). +//! +//! Vectors ensure they never allocate more than `isize::MAX` bytes. +//! +//! # Examples +//! +//! You can explicitly create a [`Vec`] with [`Vec::new`]: +//! +//! ``` +//! let v: Vec<i32> = Vec::new(); +//! ``` +//! +//! ...or by using the [`vec!`] macro: +//! +//! ``` +//! let v: Vec<i32> = vec![]; +//! +//! let v = vec![1, 2, 3, 4, 5]; +//! +//! let v = vec![0; 10]; // ten zeroes +//! ``` +//! +//! You can [`push`] values onto the end of a vector (which will grow the vector +//! as needed): +//! +//! ``` +//! let mut v = vec![1, 2]; +//! +//! v.push(3); +//! ``` +//! +//! Popping values works in much the same way: +//! +//! ``` +//! let mut v = vec![1, 2]; +//! +//! let two = v.pop(); +//! ``` +//! +//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits): +//! +//! ``` +//! let mut v = vec![1, 2, 3]; +//! let three = v[2]; +//! v[1] = v[1] + 5; +//! ``` +//! +//! 
[`push`]: Vec::push + +#![stable(feature = "rust1", since = "1.0.0")] + +#[cfg(not(no_global_oom_handling))] +use core::cmp; +use core::cmp::Ordering; +use core::fmt; +use core::hash::{Hash, Hasher}; +use core::iter; +use core::marker::PhantomData; +use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; +use core::ops::{self, Index, IndexMut, Range, RangeBounds}; +use core::ptr::{self, NonNull}; +use core::slice::{self, SliceIndex}; + +use crate::alloc::{Allocator, Global}; +#[cfg(not(no_borrow))] +use crate::borrow::{Cow, ToOwned}; +use crate::boxed::Box; +use crate::collections::{TryReserveError, TryReserveErrorKind}; +use crate::raw_vec::RawVec; + +#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] +pub use self::drain_filter::DrainFilter; + +mod drain_filter; + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "vec_splice", since = "1.21.0")] +pub use self::splice::Splice; + +#[cfg(not(no_global_oom_handling))] +mod splice; + +#[stable(feature = "drain", since = "1.6.0")] +pub use self::drain::Drain; + +mod drain; + +#[cfg(not(no_borrow))] +#[cfg(not(no_global_oom_handling))] +mod cow; + +#[cfg(not(no_global_oom_handling))] +pub(crate) use self::in_place_collect::AsVecIntoIter; +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::into_iter::IntoIter; + +mod into_iter; + +#[cfg(not(no_global_oom_handling))] +use self::is_zero::IsZero; + +mod is_zero; + +#[cfg(not(no_global_oom_handling))] +mod in_place_collect; + +mod partial_eq; + +#[cfg(not(no_global_oom_handling))] +use self::spec_from_elem::SpecFromElem; + +#[cfg(not(no_global_oom_handling))] +mod spec_from_elem; + +use self::set_len_on_drop::SetLenOnDrop; + +mod set_len_on_drop; + +#[cfg(not(no_global_oom_handling))] +use self::in_place_drop::{InPlaceDrop, InPlaceDstBufDrop}; + +#[cfg(not(no_global_oom_handling))] +mod in_place_drop; + +#[cfg(not(no_global_oom_handling))] +use self::spec_from_iter_nested::SpecFromIterNested; + +#[cfg(not(no_global_oom_handling))] +mod spec_from_iter_nested; + +#[cfg(not(no_global_oom_handling))] +use self::spec_from_iter::SpecFromIter; + +#[cfg(not(no_global_oom_handling))] +mod spec_from_iter; + +#[cfg(not(no_global_oom_handling))] +use self::spec_extend::SpecExtend; + +use self::spec_extend::TrySpecExtend; + +mod spec_extend; + +/// A contiguous growable array type, written as `Vec<T>`, short for 'vector'. +/// +/// # Examples +/// +/// ``` +/// let mut vec = Vec::new(); +/// vec.push(1); +/// vec.push(2); +/// +/// assert_eq!(vec.len(), 2); +/// assert_eq!(vec[0], 1); +/// +/// assert_eq!(vec.pop(), Some(2)); +/// assert_eq!(vec.len(), 1); +/// +/// vec[0] = 7; +/// assert_eq!(vec[0], 7); +/// +/// vec.extend([1, 2, 3]); +/// +/// for x in &vec { +/// println!("{x}"); +/// } +/// assert_eq!(vec, [7, 1, 2, 3]); +/// ``` +/// +/// The [`vec!`] macro is provided for convenient initialization: +/// +/// ``` +/// let mut vec1 = vec![1, 2, 3]; +/// vec1.push(4); +/// let vec2 = Vec::from([1, 2, 3, 4]); +/// assert_eq!(vec1, vec2); +/// ``` +/// +/// It can also initialize each element of a `Vec<T>` with a given value. 
+/// This may be more efficient than performing allocation and initialization +/// in separate steps, especially when initializing a vector of zeros: +/// +/// ``` +/// let vec = vec![0; 5]; +/// assert_eq!(vec, [0, 0, 0, 0, 0]); +/// +/// // The following is equivalent, but potentially slower: +/// let mut vec = Vec::with_capacity(5); +/// vec.resize(5, 0); +/// assert_eq!(vec, [0, 0, 0, 0, 0]); +/// ``` +/// +/// For more information, see +/// [Capacity and Reallocation](#capacity-and-reallocation). +/// +/// Use a `Vec<T>` as an efficient stack: +/// +/// ``` +/// let mut stack = Vec::new(); +/// +/// stack.push(1); +/// stack.push(2); +/// stack.push(3); +/// +/// while let Some(top) = stack.pop() { +/// // Prints 3, 2, 1 +/// println!("{top}"); +/// } +/// ``` +/// +/// # Indexing +/// +/// The `Vec` type allows to access values by index, because it implements the +/// [`Index`] trait. An example will be more explicit: +/// +/// ``` +/// let v = vec![0, 2, 4, 6]; +/// println!("{}", v[1]); // it will display '2' +/// ``` +/// +/// However be careful: if you try to access an index which isn't in the `Vec`, +/// your software will panic! You cannot do this: +/// +/// ```should_panic +/// let v = vec![0, 2, 4, 6]; +/// println!("{}", v[6]); // it will panic! +/// ``` +/// +/// Use [`get`] and [`get_mut`] if you want to check whether the index is in +/// the `Vec`. +/// +/// # Slicing +/// +/// A `Vec` can be mutable. On the other hand, slices are read-only objects. +/// To get a [slice][prim@slice], use [`&`]. Example: +/// +/// ``` +/// fn read_slice(slice: &[usize]) { +/// // ... +/// } +/// +/// let v = vec![0, 1]; +/// read_slice(&v); +/// +/// // ... and that's all! +/// // you can also do it like this: +/// let u: &[usize] = &v; +/// // or like this: +/// let u: &[_] = &v; +/// ``` +/// +/// In Rust, it's more common to pass slices as arguments rather than vectors +/// when you just want to provide read access. The same goes for [`String`] and +/// [`&str`]. +/// +/// # Capacity and reallocation +/// +/// The capacity of a vector is the amount of space allocated for any future +/// elements that will be added onto the vector. This is not to be confused with +/// the *length* of a vector, which specifies the number of actual elements +/// within the vector. If a vector's length exceeds its capacity, its capacity +/// will automatically be increased, but its elements will have to be +/// reallocated. +/// +/// For example, a vector with capacity 10 and length 0 would be an empty vector +/// with space for 10 more elements. Pushing 10 or fewer elements onto the +/// vector will not change its capacity or cause reallocation to occur. However, +/// if the vector's length is increased to 11, it will have to reallocate, which +/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`] +/// whenever possible to specify how big the vector is expected to get. +/// +/// # Guarantees +/// +/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees +/// about its design. This ensures that it's as low-overhead as possible in +/// the general case, and can be correctly manipulated in primitive ways +/// by unsafe code. Note that these guarantees refer to an unqualified `Vec<T>`. +/// If additional type parameters are added (e.g., to support custom allocators), +/// overriding their defaults may change the behavior. +/// +/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length) +/// triplet. No more, no less. 
The order of these fields is completely +/// unspecified, and you should use the appropriate methods to modify these. +/// The pointer will never be null, so this type is null-pointer-optimized. +/// +/// However, the pointer might not actually point to allocated memory. In particular, +/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`], +/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`] +/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized +/// types inside a `Vec`, it will not allocate space for them. *Note that in this case +/// the `Vec` might not report a [`capacity`] of 0*. `Vec` will allocate if and only +/// if <code>[mem::size_of::\<T>]\() * [capacity]\() > 0</code>. In general, `Vec`'s allocation +/// details are very subtle --- if you intend to allocate memory using a `Vec` +/// and use it for something else (either to pass to unsafe code, or to build your +/// own memory-backed collection), be sure to deallocate this memory by using +/// `from_raw_parts` to recover the `Vec` and then dropping it. +/// +/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap +/// (as defined by the allocator Rust is configured to use by default), and its +/// pointer points to [`len`] initialized, contiguous elements in order (what +/// you would see if you coerced it to a slice), followed by <code>[capacity] - [len]</code> +/// logically uninitialized, contiguous elements. +/// +/// A vector containing the elements `'a'` and `'b'` with capacity 4 can be +/// visualized as below. The top part is the `Vec` struct, it contains a +/// pointer to the head of the allocation in the heap, length and capacity. +/// The bottom part is the allocation on the heap, a contiguous memory block. +/// +/// ```text +/// ptr len capacity +/// +--------+--------+--------+ +/// | 0x0123 | 2 | 4 | +/// +--------+--------+--------+ +/// | +/// v +/// Heap +--------+--------+--------+--------+ +/// | 'a' | 'b' | uninit | uninit | +/// +--------+--------+--------+--------+ +/// ``` +/// +/// - **uninit** represents memory that is not initialized, see [`MaybeUninit`]. +/// - Note: the ABI is not stable and `Vec` makes no guarantees about its memory +/// layout (including the order of fields). +/// +/// `Vec` will never perform a "small optimization" where elements are actually +/// stored on the stack for two reasons: +/// +/// * It would make it more difficult for unsafe code to correctly manipulate +/// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were +/// only moved, and it would be more difficult to determine if a `Vec` had +/// actually allocated memory. +/// +/// * It would penalize the general case, incurring an additional branch +/// on every access. +/// +/// `Vec` will never automatically shrink itself, even if completely empty. This +/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec` +/// and then filling it back up to the same [`len`] should incur no calls to +/// the allocator. If you wish to free up unused memory, use +/// [`shrink_to_fit`] or [`shrink_to`]. +/// +/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is +/// sufficient. [`push`] and [`insert`] *will* (re)allocate if +/// <code>[len] == [capacity]</code>. That is, the reported capacity is completely +/// accurate, and can be relied on. It can even be used to manually free the memory +/// allocated by a `Vec` if desired. 
Bulk insertion methods *may* reallocate, even +/// when not necessary. +/// +/// `Vec` does not guarantee any particular growth strategy when reallocating +/// when full, nor when [`reserve`] is called. The current strategy is basic +/// and it may prove desirable to use a non-constant growth factor. Whatever +/// strategy is used will of course guarantee *O*(1) amortized [`push`]. +/// +/// `vec![x; n]`, `vec![a, b, c, d]`, and +/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec` +/// with exactly the requested capacity. If <code>[len] == [capacity]</code>, +/// (as is the case for the [`vec!`] macro), then a `Vec<T>` can be converted to +/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements. +/// +/// `Vec` will not specifically overwrite any data that is removed from it, +/// but also won't specifically preserve it. Its uninitialized memory is +/// scratch space that it may use however it wants. It will generally just do +/// whatever is most efficient or otherwise easy to implement. Do not rely on +/// removed data to be erased for security purposes. Even if you drop a `Vec`, its +/// buffer may simply be reused by another allocation. Even if you zero a `Vec`'s memory +/// first, that might not actually happen because the optimizer does not consider +/// this a side-effect that must be preserved. There is one case which we will +/// not break, however: using `unsafe` code to write to the excess capacity, +/// and then increasing the length to match, is always valid. +/// +/// Currently, `Vec` does not guarantee the order in which elements are dropped. +/// The order has changed in the past and may change again. +/// +/// [`get`]: slice::get +/// [`get_mut`]: slice::get_mut +/// [`String`]: crate::string::String +/// [`&str`]: type@str +/// [`shrink_to_fit`]: Vec::shrink_to_fit +/// [`shrink_to`]: Vec::shrink_to +/// [capacity]: Vec::capacity +/// [`capacity`]: Vec::capacity +/// [mem::size_of::\<T>]: core::mem::size_of +/// [len]: Vec::len +/// [`len`]: Vec::len +/// [`push`]: Vec::push +/// [`insert`]: Vec::insert +/// [`reserve`]: Vec::reserve +/// [`MaybeUninit`]: core::mem::MaybeUninit +/// [owned slice]: Box +#[stable(feature = "rust1", since = "1.0.0")] +#[cfg_attr(not(test), rustc_diagnostic_item = "Vec")] +#[rustc_insignificant_dtor] +pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> { + buf: RawVec<T, A>, + len: usize, +} + +//////////////////////////////////////////////////////////////////////////////// +// Inherent methods +//////////////////////////////////////////////////////////////////////////////// + +impl<T> Vec<T> { + /// Constructs a new, empty `Vec<T>`. + /// + /// The vector will not allocate until elements are pushed onto it. + /// + /// # Examples + /// + /// ``` + /// # #![allow(unused_mut)] + /// let mut vec: Vec<i32> = Vec::new(); + /// ``` + #[inline] + #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")] + #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] + pub const fn new() -> Self { + Vec { buf: RawVec::NEW, len: 0 } + } + + /// Constructs a new, empty `Vec<T>` with at least the specified capacity. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. 
+ /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. + /// + /// For `Vec<T>` where `T` is a zero-sized type, there will be no allocation + /// and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// + /// # Examples + /// + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... + /// for i in 0..10 { + /// vec.push(i); + /// } + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.push(11); + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<()>::with_capacity(10); + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + #[must_use] + pub fn with_capacity(capacity: usize) -> Self { + Self::with_capacity_in(capacity, Global) + } + + /// Tries to construct a new, empty `Vec<T>` with at least the specified capacity. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. + /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. + /// + /// For `Vec<T>` where `T` is a zero-sized type, there will be no allocation + /// and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Examples + /// + /// ``` + /// let mut vec = Vec::try_with_capacity(10).unwrap(); + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... 
+ /// for i in 0..10 { + /// vec.push(i); + /// } + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.push(11); + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// let mut result = Vec::try_with_capacity(usize::MAX); + /// assert!(result.is_err()); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<()>::try_with_capacity(10).unwrap(); + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// ``` + #[inline] + #[stable(feature = "kernel", since = "1.0.0")] + pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> { + Self::try_with_capacity_in(capacity, Global) + } + + /// Creates a `Vec<T>` directly from a pointer, a capacity, and a length. + /// + /// # Safety + /// + /// This is highly unsafe, due to the number of invariants that aren't + /// checked: + /// + /// * `ptr` must have been allocated using the global allocator, such as via + /// the [`alloc::alloc`] function. + /// * `T` needs to have the same alignment as what `ptr` was allocated with. + /// (`T` having a less strict alignment is not sufficient, the alignment really + /// needs to be equal to satisfy the [`dealloc`] requirement that memory must be + /// allocated and deallocated with the same layout.) + /// * The size of `T` times the `capacity` (ie. the allocated size in bytes) needs + /// to be the same size as the pointer was allocated with. (Because similar to + /// alignment, [`dealloc`] must be called with the same layout `size`.) + /// * `length` needs to be less than or equal to `capacity`. + /// * The first `length` values must be properly initialized values of type `T`. + /// * `capacity` needs to be the capacity that the pointer was allocated with. + /// * The allocated size in bytes must be no larger than `isize::MAX`. + /// See the safety documentation of [`pointer::offset`]. + /// + /// These requirements are always upheld by any `ptr` that has been allocated + /// via `Vec<T>`. Other allocation sources are allowed if the invariants are + /// upheld. + /// + /// Violating these may cause problems like corrupting the allocator's + /// internal data structures. For example it is normally **not** safe + /// to build a `Vec<u8>` from a pointer to a C `char` array with length + /// `size_t`, doing so is only safe if the array was initially allocated by + /// a `Vec` or `String`. + /// It's also not safe to build one from a `Vec<u16>` and its length, because + /// the allocator cares about the alignment, and these two types have different + /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after + /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1. To avoid + /// these issues, it is often preferable to do casting/transmuting using + /// [`slice::from_raw_parts`] instead. + /// + /// The ownership of `ptr` is effectively transferred to the + /// `Vec<T>` which may then deallocate, reallocate or change the + /// contents of memory pointed to by the pointer at will. Ensure + /// that nothing else uses the pointer after calling this + /// function. 
+ /// + /// [`String`]: crate::string::String + /// [`alloc::alloc`]: crate::alloc::alloc + /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc + /// + /// # Examples + /// + /// ``` + /// use std::ptr; + /// use std::mem; + /// + /// let v = vec![1, 2, 3]; + /// + // FIXME Update this when vec_into_raw_parts is stabilized + /// // Prevent running `v`'s destructor so we are in complete control + /// // of the allocation. + /// let mut v = mem::ManuallyDrop::new(v); + /// + /// // Pull out the various important pieces of information about `v` + /// let p = v.as_mut_ptr(); + /// let len = v.len(); + /// let cap = v.capacity(); + /// + /// unsafe { + /// // Overwrite memory with 4, 5, 6 + /// for i in 0..len { + /// ptr::write(p.add(i), 4 + i); + /// } + /// + /// // Put everything back together into a Vec + /// let rebuilt = Vec::from_raw_parts(p, len, cap); + /// assert_eq!(rebuilt, [4, 5, 6]); + /// } + /// ``` + /// + /// Using memory that was allocated elsewhere: + /// + /// ```rust + /// #![feature(allocator_api)] + /// + /// use std::alloc::{AllocError, Allocator, Global, Layout}; + /// + /// fn main() { + /// let layout = Layout::array::<u32>(16).expect("overflow cannot happen"); + /// + /// let vec = unsafe { + /// let mem = match Global.allocate(layout) { + /// Ok(mem) => mem.cast::<u32>().as_ptr(), + /// Err(AllocError) => return, + /// }; + /// + /// mem.write(1_000_000); + /// + /// Vec::from_raw_parts_in(mem, 1, 16, Global) + /// }; + /// + /// assert_eq!(vec, &[1_000_000]); + /// assert_eq!(vec.capacity(), 16); + /// } + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self { + unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) } + } +} + +impl<T, A: Allocator> Vec<T, A> { + /// Constructs a new, empty `Vec<T, A>`. + /// + /// The vector will not allocate until elements are pushed onto it. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::alloc::System; + /// + /// # #[allow(unused_mut)] + /// let mut vec: Vec<i32, _> = Vec::new_in(System); + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub const fn new_in(alloc: A) -> Self { + Vec { buf: RawVec::new_in(alloc), len: 0 } + } + + /// Constructs a new, empty `Vec<T, A>` with at least the specified capacity + /// with the provided allocator. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. + /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. + /// + /// For `Vec<T, A>` where `T` is a zero-sized type, there will be no allocation + /// and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::alloc::System; + /// + /// let mut vec = Vec::with_capacity_in(10, System); + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... + /// for i in 0..10 { + /// vec.push(i); + /// } + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.push(11); + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<(), System>::with_capacity_in(10, System); + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } + } + + /// Tries to construct a new, empty `Vec<T, A>` with at least the specified capacity + /// with the provided allocator. + /// + /// The vector will be able to hold at least `capacity` elements without + /// reallocating. This method is allowed to allocate for more elements than + /// `capacity`. If `capacity` is 0, the vector will not allocate. + /// + /// It is important to note that although the returned vector has the + /// minimum *capacity* specified, the vector will have a zero *length*. For + /// an explanation of the difference between length and capacity, see + /// *[Capacity and reallocation]*. + /// + /// If it is important to know the exact allocated capacity of a `Vec`, + /// always use the [`capacity`] method after construction. + /// + /// For `Vec<T, A>` where `T` is a zero-sized type, there will be no allocation + /// and the capacity will always be `usize::MAX`. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation + /// [`capacity`]: Vec::capacity + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::alloc::System; + /// + /// let mut vec = Vec::try_with_capacity_in(10, System).unwrap(); + /// + /// // The vector contains no items, even though it has capacity for more + /// assert_eq!(vec.len(), 0); + /// assert!(vec.capacity() >= 10); + /// + /// // These are all done without reallocating... + /// for i in 0..10 { + /// vec.push(i); + /// } + /// assert_eq!(vec.len(), 10); + /// assert!(vec.capacity() >= 10); + /// + /// // ...but this may make the vector reallocate + /// vec.push(11); + /// assert_eq!(vec.len(), 11); + /// assert!(vec.capacity() >= 11); + /// + /// let mut result = Vec::try_with_capacity_in(usize::MAX, System); + /// assert!(result.is_err()); + /// + /// // A vector of a zero-sized type will always over-allocate, since no + /// // allocation is necessary + /// let vec_units = Vec::<(), System>::try_with_capacity_in(10, System).unwrap(); + /// assert_eq!(vec_units.capacity(), usize::MAX); + /// ``` + #[inline] + #[stable(feature = "kernel", since = "1.0.0")] + pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> { + Ok(Vec { buf: RawVec::try_with_capacity_in(capacity, alloc)?, len: 0 }) + } + + /// Creates a `Vec<T, A>` directly from a pointer, a capacity, a length, + /// and an allocator. 
+ /// + /// # Safety + /// + /// This is highly unsafe, due to the number of invariants that aren't + /// checked: + /// + /// * `ptr` must be [*currently allocated*] via the given allocator `alloc`. + /// * `T` needs to have the same alignment as what `ptr` was allocated with. + /// (`T` having a less strict alignment is not sufficient, the alignment really + /// needs to be equal to satisfy the [`dealloc`] requirement that memory must be + /// allocated and deallocated with the same layout.) + /// * The size of `T` times the `capacity` (ie. the allocated size in bytes) needs + /// to be the same size as the pointer was allocated with. (Because similar to + /// alignment, [`dealloc`] must be called with the same layout `size`.) + /// * `length` needs to be less than or equal to `capacity`. + /// * The first `length` values must be properly initialized values of type `T`. + /// * `capacity` needs to [*fit*] the layout size that the pointer was allocated with. + /// * The allocated size in bytes must be no larger than `isize::MAX`. + /// See the safety documentation of [`pointer::offset`]. + /// + /// These requirements are always upheld by any `ptr` that has been allocated + /// via `Vec<T, A>`. Other allocation sources are allowed if the invariants are + /// upheld. + /// + /// Violating these may cause problems like corrupting the allocator's + /// internal data structures. For example it is **not** safe + /// to build a `Vec<u8>` from a pointer to a C `char` array with length `size_t`. + /// It's also not safe to build one from a `Vec<u16>` and its length, because + /// the allocator cares about the alignment, and these two types have different + /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after + /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1. + /// + /// The ownership of `ptr` is effectively transferred to the + /// `Vec<T>` which may then deallocate, reallocate or change the + /// contents of memory pointed to by the pointer at will. Ensure + /// that nothing else uses the pointer after calling this + /// function. + /// + /// [`String`]: crate::string::String + /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc + /// [*currently allocated*]: crate::alloc::Allocator#currently-allocated-memory + /// [*fit*]: crate::alloc::Allocator#memory-fitting + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::alloc::System; + /// + /// use std::ptr; + /// use std::mem; + /// + /// let mut v = Vec::with_capacity_in(3, System); + /// v.push(1); + /// v.push(2); + /// v.push(3); + /// + // FIXME Update this when vec_into_raw_parts is stabilized + /// // Prevent running `v`'s destructor so we are in complete control + /// // of the allocation. 
+ /// let mut v = mem::ManuallyDrop::new(v); + /// + /// // Pull out the various important pieces of information about `v` + /// let p = v.as_mut_ptr(); + /// let len = v.len(); + /// let cap = v.capacity(); + /// let alloc = v.allocator(); + /// + /// unsafe { + /// // Overwrite memory with 4, 5, 6 + /// for i in 0..len { + /// ptr::write(p.add(i), 4 + i); + /// } + /// + /// // Put everything back together into a Vec + /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); + /// assert_eq!(rebuilt, [4, 5, 6]); + /// } + /// ``` + /// + /// Using memory that was allocated elsewhere: + /// + /// ```rust + /// use std::alloc::{alloc, Layout}; + /// + /// fn main() { + /// let layout = Layout::array::<u32>(16).expect("overflow cannot happen"); + /// let vec = unsafe { + /// let mem = alloc(layout).cast::<u32>(); + /// if mem.is_null() { + /// return; + /// } + /// + /// mem.write(1_000_000); + /// + /// Vec::from_raw_parts(mem, 1, 16) + /// }; + /// + /// assert_eq!(vec, &[1_000_000]); + /// assert_eq!(vec.capacity(), 16); + /// } + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { + unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } } + } + + /// Decomposes a `Vec<T>` into its raw components. + /// + /// Returns the raw pointer to the underlying data, the length of + /// the vector (in elements), and the allocated capacity of the + /// data (in elements). These are the same arguments in the same + /// order as the arguments to [`from_raw_parts`]. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `Vec`. The only way to do + /// this is to convert the raw pointer, length, and capacity back + /// into a `Vec` with the [`from_raw_parts`] function, allowing + /// the destructor to perform the cleanup. + /// + /// [`from_raw_parts`]: Vec::from_raw_parts + /// + /// # Examples + /// + /// ``` + /// #![feature(vec_into_raw_parts)] + /// let v: Vec<i32> = vec![-1, 0, 1]; + /// + /// let (ptr, len, cap) = v.into_raw_parts(); + /// + /// let rebuilt = unsafe { + /// // We can now make changes to the components, such as + /// // transmuting the raw pointer to a compatible type. + /// let ptr = ptr as *mut u32; + /// + /// Vec::from_raw_parts(ptr, len, cap) + /// }; + /// assert_eq!(rebuilt, [4294967295, 0, 1]); + /// ``` + #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] + pub fn into_raw_parts(self) -> (*mut T, usize, usize) { + let mut me = ManuallyDrop::new(self); + (me.as_mut_ptr(), me.len(), me.capacity()) + } + + /// Decomposes a `Vec<T>` into its raw components. + /// + /// Returns the raw pointer to the underlying data, the length of the vector (in elements), + /// the allocated capacity of the data (in elements), and the allocator. These are the same + /// arguments in the same order as the arguments to [`from_raw_parts_in`]. + /// + /// After calling this function, the caller is responsible for the + /// memory previously managed by the `Vec`. The only way to do + /// this is to convert the raw pointer, length, and capacity back + /// into a `Vec` with the [`from_raw_parts_in`] function, allowing + /// the destructor to perform the cleanup. 
+ /// + /// [`from_raw_parts_in`]: Vec::from_raw_parts_in + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, vec_into_raw_parts)] + /// + /// use std::alloc::System; + /// + /// let mut v: Vec<i32, System> = Vec::new_in(System); + /// v.push(-1); + /// v.push(0); + /// v.push(1); + /// + /// let (ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); + /// + /// let rebuilt = unsafe { + /// // We can now make changes to the components, such as + /// // transmuting the raw pointer to a compatible type. + /// let ptr = ptr as *mut u32; + /// + /// Vec::from_raw_parts_in(ptr, len, cap, alloc) + /// }; + /// assert_eq!(rebuilt, [4294967295, 0, 1]); + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")] + pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) { + let mut me = ManuallyDrop::new(self); + let len = me.len(); + let capacity = me.capacity(); + let ptr = me.as_mut_ptr(); + let alloc = unsafe { ptr::read(me.allocator()) }; + (ptr, len, capacity, alloc) + } + + /// Returns the total number of elements the vector can hold without + /// reallocating. + /// + /// # Examples + /// + /// ``` + /// let mut vec: Vec<i32> = Vec::with_capacity(10); + /// vec.push(42); + /// assert!(vec.capacity() >= 10); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn capacity(&self) -> usize { + self.buf.capacity() + } + + /// Reserves capacity for at least `additional` more elements to be inserted + /// in the given `Vec<T>`. The collection may reserve more space to + /// speculatively avoid frequent reallocations. After calling `reserve`, + /// capacity will be greater than or equal to `self.len() + additional`. + /// Does nothing if capacity is already sufficient. + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1]; + /// vec.reserve(10); + /// assert!(vec.capacity() >= 11); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn reserve(&mut self, additional: usize) { + self.buf.reserve(self.len, additional); + } + + /// Reserves the minimum capacity for at least `additional` more elements to + /// be inserted in the given `Vec<T>`. Unlike [`reserve`], this will not + /// deliberately over-allocate to speculatively avoid frequent allocations. + /// After calling `reserve_exact`, capacity will be greater than or equal to + /// `self.len() + additional`. Does nothing if the capacity is already + /// sufficient. + /// + /// Note that the allocator may give the collection more space than it + /// requests. Therefore, capacity can not be relied upon to be precisely + /// minimal. Prefer [`reserve`] if future insertions are expected. + /// + /// [`reserve`]: Vec::reserve + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1]; + /// vec.reserve_exact(10); + /// assert!(vec.capacity() >= 11); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn reserve_exact(&mut self, additional: usize) { + self.buf.reserve_exact(self.len, additional); + } + + /// Tries to reserve capacity for at least `additional` more elements to be inserted + /// in the given `Vec<T>`. The collection may reserve more space to speculatively avoid + /// frequent reallocations. 
After calling `try_reserve`, capacity will be + /// greater than or equal to `self.len() + additional` if it returns + /// `Ok(())`. Does nothing if capacity is already sufficient. This method + /// preserves the contents even if an error occurs. + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use std::collections::TryReserveError; + /// + /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> { + /// let mut output = Vec::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve(data.len())?; + /// + /// // Now we know this can't OOM in the middle of our complex work + /// output.extend(data.iter().map(|&val| { + /// val * 2 + 5 // very complicated + /// })); + /// + /// Ok(output) + /// } + /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); + /// ``` + #[stable(feature = "try_reserve", since = "1.57.0")] + pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { + self.buf.try_reserve(self.len, additional) + } + + /// Tries to reserve the minimum capacity for at least `additional` + /// elements to be inserted in the given `Vec<T>`. Unlike [`try_reserve`], + /// this will not deliberately over-allocate to speculatively avoid frequent + /// allocations. After calling `try_reserve_exact`, capacity will be greater + /// than or equal to `self.len() + additional` if it returns `Ok(())`. + /// Does nothing if the capacity is already sufficient. + /// + /// Note that the allocator may give the collection more space than it + /// requests. Therefore, capacity can not be relied upon to be precisely + /// minimal. Prefer [`try_reserve`] if future insertions are expected. + /// + /// [`try_reserve`]: Vec::try_reserve + /// + /// # Errors + /// + /// If the capacity overflows, or the allocator reports a failure, then an error + /// is returned. + /// + /// # Examples + /// + /// ``` + /// use std::collections::TryReserveError; + /// + /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> { + /// let mut output = Vec::new(); + /// + /// // Pre-reserve the memory, exiting if we can't + /// output.try_reserve_exact(data.len())?; + /// + /// // Now we know this can't OOM in the middle of our complex work + /// output.extend(data.iter().map(|&val| { + /// val * 2 + 5 // very complicated + /// })); + /// + /// Ok(output) + /// } + /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?"); + /// ``` + #[stable(feature = "try_reserve", since = "1.57.0")] + pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { + self.buf.try_reserve_exact(self.len, additional) + } + + /// Shrinks the capacity of the vector as much as possible. + /// + /// It will drop down as close as possible to the length but the allocator + /// may still inform the vector that there is space for a few more elements. 
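+ // Quick sketch (illustrative, not upstream code) of the guarantee stated for
+ // `try_reserve` above: a failed reservation reports an error but leaves the
+ // existing elements untouched.
+ //
+ //     let mut v = vec![1, 2, 3];
+ //     // `len + usize::MAX` overflows, so this fails instead of aborting.
+ //     assert!(v.try_reserve(usize::MAX).is_err());
+ //     // The contents (and the existing allocation) are preserved.
+ //     assert_eq!(v, [1, 2, 3]);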
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// vec.extend([1, 2, 3]); + /// assert!(vec.capacity() >= 10); + /// vec.shrink_to_fit(); + /// assert!(vec.capacity() >= 3); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn shrink_to_fit(&mut self) { + // The capacity is never less than the length, and there's nothing to do when + // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit` + // by only calling it with a greater capacity. + if self.capacity() > self.len { + self.buf.shrink_to_fit(self.len); + } + } + + /// Shrinks the capacity of the vector with a lower bound. + /// + /// The capacity will remain at least as large as both the length + /// and the supplied value. + /// + /// If the current capacity is less than the lower limit, this is a no-op. + /// + /// # Examples + /// + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// vec.extend([1, 2, 3]); + /// assert!(vec.capacity() >= 10); + /// vec.shrink_to(4); + /// assert!(vec.capacity() >= 4); + /// vec.shrink_to(0); + /// assert!(vec.capacity() >= 3); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "shrink_to", since = "1.56.0")] + pub fn shrink_to(&mut self, min_capacity: usize) { + if self.capacity() > min_capacity { + self.buf.shrink_to_fit(cmp::max(self.len, min_capacity)); + } + } + + /// Converts the vector into [`Box<[T]>`][owned slice]. + /// + /// If the vector has excess capacity, its items will be moved into a + /// newly-allocated buffer with exactly the right capacity. + /// + /// [owned slice]: Box + /// + /// # Examples + /// + /// ``` + /// let v = vec![1, 2, 3]; + /// + /// let slice = v.into_boxed_slice(); + /// ``` + /// + /// Any excess capacity is removed: + /// + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// vec.extend([1, 2, 3]); + /// + /// assert!(vec.capacity() >= 10); + /// let slice = vec.into_boxed_slice(); + /// assert_eq!(slice.into_vec().capacity(), 3); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn into_boxed_slice(mut self) -> Box<[T], A> { + unsafe { + self.shrink_to_fit(); + let me = ManuallyDrop::new(self); + let buf = ptr::read(&me.buf); + let len = me.len(); + buf.into_box(len).assume_init() + } + } + + /// Shortens the vector, keeping the first `len` elements and dropping + /// the rest. + /// + /// If `len` is greater than the vector's current length, this has no + /// effect. + /// + /// The [`drain`] method can emulate `truncate`, but causes the excess + /// elements to be returned instead of dropped. + /// + /// Note that this method has no effect on the allocated capacity + /// of the vector. + /// + /// # Examples + /// + /// Truncating a five element vector to two elements: + /// + /// ``` + /// let mut vec = vec![1, 2, 3, 4, 5]; + /// vec.truncate(2); + /// assert_eq!(vec, [1, 2]); + /// ``` + /// + /// No truncation occurs when `len` is greater than the vector's current + /// length: + /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// vec.truncate(8); + /// assert_eq!(vec, [1, 2, 3]); + /// ``` + /// + /// Truncating when `len == 0` is equivalent to calling the [`clear`] + /// method. 
+ /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// vec.truncate(0); + /// assert_eq!(vec, []); + /// ``` + /// + /// [`clear`]: Vec::clear + /// [`drain`]: Vec::drain + #[stable(feature = "rust1", since = "1.0.0")] + pub fn truncate(&mut self, len: usize) { + // This is safe because: + // + // * the slice passed to `drop_in_place` is valid; the `len > self.len` + // case avoids creating an invalid slice, and + // * the `len` of the vector is shrunk before calling `drop_in_place`, + // such that no value will be dropped twice in case `drop_in_place` + // were to panic once (if it panics twice, the program aborts). + unsafe { + // Note: It's intentional that this is `>` and not `>=`. + // Changing it to `>=` has negative performance + // implications in some cases. See #78884 for more. + if len > self.len { + return; + } + let remaining_len = self.len - len; + let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); + self.len = len; + ptr::drop_in_place(s); + } + } + + /// Extracts a slice containing the entire vector. + /// + /// Equivalent to `&s[..]`. + /// + /// # Examples + /// + /// ``` + /// use std::io::{self, Write}; + /// let buffer = vec![1, 2, 3, 5, 8]; + /// io::sink().write(buffer.as_slice()).unwrap(); + /// ``` + #[inline] + #[stable(feature = "vec_as_slice", since = "1.7.0")] + pub fn as_slice(&self) -> &[T] { + self + } + + /// Extracts a mutable slice of the entire vector. + /// + /// Equivalent to `&mut s[..]`. + /// + /// # Examples + /// + /// ``` + /// use std::io::{self, Read}; + /// let mut buffer = vec![0; 3]; + /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap(); + /// ``` + #[inline] + #[stable(feature = "vec_as_slice", since = "1.7.0")] + pub fn as_mut_slice(&mut self) -> &mut [T] { + self + } + + /// Returns a raw pointer to the vector's buffer, or a dangling raw pointer + /// valid for zero sized reads if the vector didn't allocate. + /// + /// The caller must ensure that the vector outlives the pointer this + /// function returns, or else it will end up pointing to garbage. + /// Modifying the vector may cause its buffer to be reallocated, + /// which would also make any pointers to it invalid. + /// + /// The caller must also ensure that the memory the pointer (non-transitively) points to + /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer + /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`]. + /// + /// # Examples + /// + /// ``` + /// let x = vec![1, 2, 4]; + /// let x_ptr = x.as_ptr(); + /// + /// unsafe { + /// for i in 0..x.len() { + /// assert_eq!(*x_ptr.add(i), 1 << i); + /// } + /// } + /// ``` + /// + /// [`as_mut_ptr`]: Vec::as_mut_ptr + #[stable(feature = "vec_as_ptr", since = "1.37.0")] + #[inline] + pub fn as_ptr(&self) -> *const T { + // We shadow the slice method of the same name to avoid going through + // `deref`, which creates an intermediate reference. + self.buf.ptr() + } + + /// Returns an unsafe mutable pointer to the vector's buffer, or a dangling + /// raw pointer valid for zero sized reads if the vector didn't allocate. + /// + /// The caller must ensure that the vector outlives the pointer this + /// function returns, or else it will end up pointing to garbage. + /// Modifying the vector may cause its buffer to be reallocated, + /// which would also make any pointers to it invalid. + /// + /// # Examples + /// + /// ``` + /// // Allocate vector big enough for 4 elements. 
+ /// let size = 4; + /// let mut x: Vec<i32> = Vec::with_capacity(size); + /// let x_ptr = x.as_mut_ptr(); + /// + /// // Initialize elements via raw pointer writes, then set length. + /// unsafe { + /// for i in 0..size { + /// *x_ptr.add(i) = i as i32; + /// } + /// x.set_len(size); + /// } + /// assert_eq!(&*x, &[0, 1, 2, 3]); + /// ``` + #[stable(feature = "vec_as_ptr", since = "1.37.0")] + #[inline] + pub fn as_mut_ptr(&mut self) -> *mut T { + // We shadow the slice method of the same name to avoid going through + // `deref_mut`, which creates an intermediate reference. + self.buf.ptr() + } + + /// Returns a reference to the underlying allocator. + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn allocator(&self) -> &A { + self.buf.allocator() + } + + /// Forces the length of the vector to `new_len`. + /// + /// This is a low-level operation that maintains none of the normal + /// invariants of the type. Normally changing the length of a vector + /// is done using one of the safe operations instead, such as + /// [`truncate`], [`resize`], [`extend`], or [`clear`]. + /// + /// [`truncate`]: Vec::truncate + /// [`resize`]: Vec::resize + /// [`extend`]: Extend::extend + /// [`clear`]: Vec::clear + /// + /// # Safety + /// + /// - `new_len` must be less than or equal to [`capacity()`]. + /// - The elements at `old_len..new_len` must be initialized. + /// + /// [`capacity()`]: Vec::capacity + /// + /// # Examples + /// + /// This method can be useful for situations in which the vector + /// is serving as a buffer for other code, particularly over FFI: + /// + /// ```no_run + /// # #![allow(dead_code)] + /// # // This is just a minimal skeleton for the doc example; + /// # // don't use this as a starting point for a real library. + /// # pub struct StreamWrapper { strm: *mut std::ffi::c_void } + /// # const Z_OK: i32 = 0; + /// # extern "C" { + /// # fn deflateGetDictionary( + /// # strm: *mut std::ffi::c_void, + /// # dictionary: *mut u8, + /// # dictLength: *mut usize, + /// # ) -> i32; + /// # } + /// # impl StreamWrapper { + /// pub fn get_dictionary(&self) -> Option<Vec<u8>> { + /// // Per the FFI method's docs, "32768 bytes is always enough". + /// let mut dict = Vec::with_capacity(32_768); + /// let mut dict_length = 0; + /// // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that: + /// // 1. `dict_length` elements were initialized. + /// // 2. `dict_length` <= the capacity (32_768) + /// // which makes `set_len` safe to call. + /// unsafe { + /// // Make the FFI call... + /// let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length); + /// if r == Z_OK { + /// // ...and update the length to what was initialized. + /// dict.set_len(dict_length); + /// Some(dict) + /// } else { + /// None + /// } + /// } + /// } + /// # } + /// ``` + /// + /// While the following example is sound, there is a memory leak since + /// the inner vectors were not freed prior to the `set_len` call: + /// + /// ``` + /// let mut vec = vec![vec![1, 0, 0], + /// vec![0, 1, 0], + /// vec![0, 0, 1]]; + /// // SAFETY: + /// // 1. `old_len..0` is empty so no elements need to be initialized. + /// // 2. `0 <= capacity` always holds whatever `capacity` is. + /// unsafe { + /// vec.set_len(0); + /// } + /// ``` + /// + /// Normally, here, one would use [`clear`] instead to correctly drop + /// the contents and thus not leak memory. 
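+ // A pure-Rust counterpart to the FFI example above (illustrative, not
+ // upstream code): initialize the spare capacity by hand, then call `set_len`,
+ // upholding both safety conditions explicitly.
+ //
+ //     let mut v: Vec<u8> = Vec::with_capacity(4);
+ //     for (i, slot) in v.spare_capacity_mut().iter_mut().take(4).enumerate() {
+ //         slot.write(i as u8);
+ //     }
+ //     // SAFETY:
+ //     // 1. the first 4 elements were initialized in the loop above, and
+ //     // 2. 4 <= the capacity requested from `with_capacity`.
+ //     unsafe { v.set_len(4) };
+ //     assert_eq!(v, [0, 1, 2, 3]);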
+ #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub unsafe fn set_len(&mut self, new_len: usize) { + debug_assert!(new_len <= self.capacity()); + + self.len = new_len; + } + + /// Removes an element from the vector and returns it. + /// + /// The removed element is replaced by the last element of the vector. + /// + /// This does not preserve ordering, but is *O*(1). + /// If you need to preserve the element order, use [`remove`] instead. + /// + /// [`remove`]: Vec::remove + /// + /// # Panics + /// + /// Panics if `index` is out of bounds. + /// + /// # Examples + /// + /// ``` + /// let mut v = vec!["foo", "bar", "baz", "qux"]; + /// + /// assert_eq!(v.swap_remove(1), "bar"); + /// assert_eq!(v, ["foo", "qux", "baz"]); + /// + /// assert_eq!(v.swap_remove(0), "foo"); + /// assert_eq!(v, ["baz", "qux"]); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn swap_remove(&mut self, index: usize) -> T { + #[cold] + #[inline(never)] + fn assert_failed(index: usize, len: usize) -> ! { + panic!("swap_remove index (is {index}) should be < len (is {len})"); + } + + let len = self.len(); + if index >= len { + assert_failed(index, len); + } + unsafe { + // We replace self[index] with the last element. Note that if the + // bounds check above succeeds there must be a last element (which + // can be self[index] itself). + let value = ptr::read(self.as_ptr().add(index)); + let base_ptr = self.as_mut_ptr(); + ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1); + self.set_len(len - 1); + value + } + } + + /// Inserts an element at position `index` within the vector, shifting all + /// elements after it to the right. + /// + /// # Panics + /// + /// Panics if `index > len`. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// vec.insert(1, 4); + /// assert_eq!(vec, [1, 4, 2, 3]); + /// vec.insert(4, 5); + /// assert_eq!(vec, [1, 4, 2, 3, 5]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn insert(&mut self, index: usize, element: T) { + #[cold] + #[inline(never)] + fn assert_failed(index: usize, len: usize) -> ! { + panic!("insertion index (is {index}) should be <= len (is {len})"); + } + + let len = self.len(); + + // space for the new element + if len == self.buf.capacity() { + self.reserve(1); + } + + unsafe { + // infallible + // The spot to put the new value + { + let p = self.as_mut_ptr().add(index); + if index < len { + // Shift everything over to make space. (Duplicating the + // `index`th element into two consecutive places.) + ptr::copy(p, p.add(1), len - index); + } else if index == len { + // No elements need shifting. + } else { + assert_failed(index, len); + } + // Write it in, overwriting the first copy of the `index`th + // element. + ptr::write(p, element); + } + self.set_len(len + 1); + } + } + + /// Removes and returns the element at position `index` within the vector, + /// shifting all elements after it to the left. + /// + /// Note: Because this shifts over the remaining elements, it has a + /// worst-case performance of *O*(*n*). If you don't need the order of elements + /// to be preserved, use [`swap_remove`] instead. If you'd like to remove + /// elements from the beginning of the `Vec`, consider using + /// [`VecDeque::pop_front`] instead. + /// + /// [`swap_remove`]: Vec::swap_remove + /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front + /// + /// # Panics + /// + /// Panics if `index` is out of bounds. 
+ /// + /// # Examples + /// + /// ``` + /// let mut v = vec![1, 2, 3]; + /// assert_eq!(v.remove(1), 2); + /// assert_eq!(v, [1, 3]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + #[track_caller] + pub fn remove(&mut self, index: usize) -> T { + #[cold] + #[inline(never)] + #[track_caller] + fn assert_failed(index: usize, len: usize) -> ! { + panic!("removal index (is {index}) should be < len (is {len})"); + } + + let len = self.len(); + if index >= len { + assert_failed(index, len); + } + unsafe { + // infallible + let ret; + { + // the place we are taking from. + let ptr = self.as_mut_ptr().add(index); + // copy it out, unsafely having a copy of the value on + // the stack and in the vector at the same time. + ret = ptr::read(ptr); + + // Shift everything down to fill in that spot. + ptr::copy(ptr.add(1), ptr, len - index - 1); + } + self.set_len(len - 1); + ret + } + } + + /// Retains only the elements specified by the predicate. + /// + /// In other words, remove all elements `e` for which `f(&e)` returns `false`. + /// This method operates in place, visiting each element exactly once in the + /// original order, and preserves the order of the retained elements. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3, 4]; + /// vec.retain(|&x| x % 2 == 0); + /// assert_eq!(vec, [2, 4]); + /// ``` + /// + /// Because the elements are visited exactly once in the original order, + /// external state may be used to decide which elements to keep. + /// + /// ``` + /// let mut vec = vec![1, 2, 3, 4, 5]; + /// let keep = [false, true, true, false, true]; + /// let mut iter = keep.iter(); + /// vec.retain(|_| *iter.next().unwrap()); + /// assert_eq!(vec, [2, 3, 5]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn retain<F>(&mut self, mut f: F) + where + F: FnMut(&T) -> bool, + { + self.retain_mut(|elem| f(elem)); + } + + /// Retains only the elements specified by the predicate, passing a mutable reference to it. + /// + /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`. + /// This method operates in place, visiting each element exactly once in the + /// original order, and preserves the order of the retained elements. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3, 4]; + /// vec.retain_mut(|x| if *x <= 3 { + /// *x += 1; + /// true + /// } else { + /// false + /// }); + /// assert_eq!(vec, [2, 3, 4]); + /// ``` + #[stable(feature = "vec_retain_mut", since = "1.61.0")] + pub fn retain_mut<F>(&mut self, mut f: F) + where + F: FnMut(&mut T) -> bool, + { + let original_len = self.len(); + // Avoid double drop if the drop guard is not executed, + // since we may make some holes during the process. + unsafe { self.set_len(0) }; + + // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked] + // |<- processed len ->| ^- next to check + // |<- deleted cnt ->| + // |<- original_len ->| + // Kept: Elements which predicate returns true on. + // Hole: Moved or dropped element slot. + // Unchecked: Unchecked valid elements. + // + // This drop guard will be invoked when predicate or `drop` of element panicked. + // It shifts unchecked elements to cover holes and `set_len` to the correct length. + // In cases when predicate and `drop` never panick, it will be optimized out. 
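+ // Worked example (illustrative, not from the upstream comments): retaining
+ // the even elements of [1, 2, 3, 4, 5] with the two-stage loop below.
+ //
+ // Stage 1 (DELETED = false): 1 fails the predicate and is dropped in place,
+ // leaving [Hole, 2, 3, 4, 5] with processed_len = 1 and deleted_cnt = 1,
+ // then the loop breaks.
+ // Stage 2 (DELETED = true): 2 is kept and copied back over the hole, 3 is
+ // dropped, 4 is copied into the next free slot, 5 is dropped, giving
+ // [2, 4, Hole, Hole, Hole] with deleted_cnt = 3.
+ // Finally the guard's length update (reached via `drop(g)` below) sets
+ // len = original_len - deleted_cnt = 2, i.e. the vector ends up as [2, 4].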
+ struct BackshiftOnDrop<'a, T, A: Allocator> { + v: &'a mut Vec<T, A>, + processed_len: usize, + deleted_cnt: usize, + original_len: usize, + } + + impl<T, A: Allocator> Drop for BackshiftOnDrop<'_, T, A> { + fn drop(&mut self) { + if self.deleted_cnt > 0 { + // SAFETY: Trailing unchecked items must be valid since we never touch them. + unsafe { + ptr::copy( + self.v.as_ptr().add(self.processed_len), + self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt), + self.original_len - self.processed_len, + ); + } + } + // SAFETY: After filling holes, all items are in contiguous memory. + unsafe { + self.v.set_len(self.original_len - self.deleted_cnt); + } + } + } + + let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; + + fn process_loop<F, T, A: Allocator, const DELETED: bool>( + original_len: usize, + f: &mut F, + g: &mut BackshiftOnDrop<'_, T, A>, + ) where + F: FnMut(&mut T) -> bool, + { + while g.processed_len != original_len { + // SAFETY: Unchecked element must be valid. + let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) }; + if !f(cur) { + // Advance early to avoid double drop if `drop_in_place` panicked. + g.processed_len += 1; + g.deleted_cnt += 1; + // SAFETY: We never touch this element again after dropped. + unsafe { ptr::drop_in_place(cur) }; + // We already advanced the counter. + if DELETED { + continue; + } else { + break; + } + } + if DELETED { + // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. + // We use copy for move, and never touch this element again. + unsafe { + let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt); + ptr::copy_nonoverlapping(cur, hole_slot, 1); + } + } + g.processed_len += 1; + } + } + + // Stage 1: Nothing was deleted. + process_loop::<F, T, A, false>(original_len, &mut f, &mut g); + + // Stage 2: Some elements were deleted. + process_loop::<F, T, A, true>(original_len, &mut f, &mut g); + + // All item are processed. This can be optimized to `set_len` by LLVM. + drop(g); + } + + /// Removes all but the first of consecutive elements in the vector that resolve to the same + /// key. + /// + /// If the vector is sorted, this removes all duplicates. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![10, 20, 21, 30, 20]; + /// + /// vec.dedup_by_key(|i| *i / 10); + /// + /// assert_eq!(vec, [10, 20, 30, 20]); + /// ``` + #[stable(feature = "dedup_by", since = "1.16.0")] + #[inline] + pub fn dedup_by_key<F, K>(&mut self, mut key: F) + where + F: FnMut(&mut T) -> K, + K: PartialEq, + { + self.dedup_by(|a, b| key(a) == key(b)) + } + + /// Removes all but the first of consecutive elements in the vector satisfying a given equality + /// relation. + /// + /// The `same_bucket` function is passed references to two elements from the vector and + /// must determine if the elements compare equal. The elements are passed in opposite order + /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed. + /// + /// If the vector is sorted, this removes all duplicates. 
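+ // A small sketch (illustrative, not upstream code) of why the argument order
+ // matters when `same_bucket` is not symmetric: `a` is the later, candidate
+ // element, `b` is the most recently kept one, and returning `true` removes `a`.
+ //
+ //     let mut v = vec![1, 2, 4, 8];
+ //     // Remove an element that is exactly double the last kept element.
+ //     v.dedup_by(|a, b| *a == *b * 2);
+ //     // 2 (= 1 * 2) and 8 (= 4 * 2) are removed; 4 survives because the last
+ //     // kept element at that point is 1, not 2.
+ //     assert_eq!(v, [1, 4]);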
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; + /// + /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); + /// + /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); + /// ``` + #[stable(feature = "dedup_by", since = "1.16.0")] + pub fn dedup_by<F>(&mut self, mut same_bucket: F) + where + F: FnMut(&mut T, &mut T) -> bool, + { + let len = self.len(); + if len <= 1 { + return; + } + + /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ + struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { + /* Offset of the element we want to check if it is duplicate */ + read: usize, + + /* Offset of the place where we want to place the non-duplicate + * when we find it. */ + write: usize, + + /* The Vec that would need correction if `same_bucket` panicked */ + vec: &'a mut Vec<T, A>, + } + + impl<'a, T, A: core::alloc::Allocator> Drop for FillGapOnDrop<'a, T, A> { + fn drop(&mut self) { + /* This code gets executed when `same_bucket` panics */ + + /* SAFETY: invariant guarantees that `read - write` + * and `len - read` never overflow and that the copy is always + * in-bounds. */ + unsafe { + let ptr = self.vec.as_mut_ptr(); + let len = self.vec.len(); + + /* How many items were left when `same_bucket` panicked. + * Basically vec[read..].len() */ + let items_left = len.wrapping_sub(self.read); + + /* Pointer to first item in vec[write..write+items_left] slice */ + let dropped_ptr = ptr.add(self.write); + /* Pointer to first item in vec[read..] slice */ + let valid_ptr = ptr.add(self.read); + + /* Copy `vec[read..]` to `vec[write..write+items_left]`. + * The slices can overlap, so `copy_nonoverlapping` cannot be used */ + ptr::copy(valid_ptr, dropped_ptr, items_left); + + /* How many items have been already dropped + * Basically vec[read..write].len() */ + let dropped = self.read.wrapping_sub(self.write); + + self.vec.set_len(len - dropped); + } + } + } + + let mut gap = FillGapOnDrop { read: 1, write: 1, vec: self }; + let ptr = gap.vec.as_mut_ptr(); + + /* Drop items while going through Vec, it should be more efficient than + * doing slice partition_dedup + truncate */ + + /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr + * are always in-bounds and read_ptr never aliases prev_ptr */ + unsafe { + while gap.read < len { + let read_ptr = ptr.add(gap.read); + let prev_ptr = ptr.add(gap.write.wrapping_sub(1)); + + if same_bucket(&mut *read_ptr, &mut *prev_ptr) { + // Increase `gap.read` now since the drop may panic. + gap.read += 1; + /* We have found duplicate, drop it in-place */ + ptr::drop_in_place(read_ptr); + } else { + let write_ptr = ptr.add(gap.write); + + /* Because `read_ptr` can be equal to `write_ptr`, we either + * have to use `copy` or conditional `copy_nonoverlapping`. + * Looks like the first option is faster. */ + ptr::copy(read_ptr, write_ptr, 1); + + /* We have filled that place, so go further */ + gap.write += 1; + gap.read += 1; + } + } + + /* Technically we could let `gap` clean up with its Drop, but + * when `same_bucket` is guaranteed to not panic, this bloats a little + * the codegen, so we just do it manually */ + gap.vec.set_len(gap.write); + mem::forget(gap); + } + } + + /// Appends an element to the back of a collection. + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. 
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2]; + /// vec.push(3); + /// assert_eq!(vec, [1, 2, 3]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn push(&mut self, value: T) { + // This will panic or abort if we would allocate > isize::MAX bytes + // or if the length increment would overflow for zero-sized types. + if self.len == self.buf.capacity() { + self.buf.reserve_for_push(self.len); + } + unsafe { + let end = self.as_mut_ptr().add(self.len); + ptr::write(end, value); + self.len += 1; + } + } + + /// Tries to append an element to the back of a collection. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2]; + /// vec.try_push(3).unwrap(); + /// assert_eq!(vec, [1, 2, 3]); + /// ``` + #[inline] + #[stable(feature = "kernel", since = "1.0.0")] + pub fn try_push(&mut self, value: T) -> Result<(), TryReserveError> { + if self.len == self.buf.capacity() { + self.buf.try_reserve_for_push(self.len)?; + } + unsafe { + let end = self.as_mut_ptr().add(self.len); + ptr::write(end, value); + self.len += 1; + } + Ok(()) + } + + /// Appends an element if there is sufficient spare capacity, otherwise an error is returned + /// with the element. + /// + /// Unlike [`push`] this method will not reallocate when there's insufficient capacity. + /// The caller should use [`reserve`] or [`try_reserve`] to ensure that there is enough capacity. + /// + /// [`push`]: Vec::push + /// [`reserve`]: Vec::reserve + /// [`try_reserve`]: Vec::try_reserve + /// + /// # Examples + /// + /// A manual, panic-free alternative to [`FromIterator`]: + /// + /// ``` + /// #![feature(vec_push_within_capacity)] + /// + /// use std::collections::TryReserveError; + /// fn from_iter_fallible<T>(iter: impl Iterator<Item=T>) -> Result<Vec<T>, TryReserveError> { + /// let mut vec = Vec::new(); + /// for value in iter { + /// if let Err(value) = vec.push_within_capacity(value) { + /// vec.try_reserve(1)?; + /// // this cannot fail, the previous line either returned or added at least 1 free slot + /// let _ = vec.push_within_capacity(value); + /// } + /// } + /// Ok(vec) + /// } + /// assert_eq!(from_iter_fallible(0..100), Ok(Vec::from_iter(0..100))); + /// ``` + #[inline] + #[unstable(feature = "vec_push_within_capacity", issue = "100486")] + pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> { + if self.len == self.buf.capacity() { + return Err(value); + } + unsafe { + let end = self.as_mut_ptr().add(self.len); + ptr::write(end, value); + self.len += 1; + } + Ok(()) + } + + /// Removes the last element from a vector and returns it, or [`None`] if it + /// is empty. + /// + /// If you'd like to pop the first element, consider using + /// [`VecDeque::pop_front`] instead. + /// + /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// assert_eq!(vec.pop(), Some(3)); + /// assert_eq!(vec, [1, 2]); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn pop(&mut self) -> Option<T> { + if self.len == 0 { + None + } else { + unsafe { + self.len -= 1; + Some(ptr::read(self.as_ptr().add(self.len()))) + } + } + } + + /// Moves all the elements of `other` into `self`, leaving `other` empty. + /// + /// # Panics + /// + /// Panics if the new capacity exceeds `isize::MAX` bytes. 
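+ // Sketch (illustrative, not upstream code) contrasting the push flavours
+ // above: `push_within_capacity` (unstable upstream) never allocates, while
+ // the fallible `try_push` grows the buffer instead of aborting on failure.
+ //
+ //     let mut v: Vec<u32> = Vec::new();
+ //     // `new` does not allocate, so there is no spare capacity to use yet.
+ //     assert_eq!(v.push_within_capacity(0), Err(0));
+ //     // A fallible push grows the buffer and then stores the value.
+ //     v.try_push(0).unwrap();
+ //     assert!(v.capacity() > 0);
+ //     assert_eq!(v, [0]);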
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// let mut vec2 = vec![4, 5, 6]; + /// vec.append(&mut vec2); + /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); + /// assert_eq!(vec2, []); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[inline] + #[stable(feature = "append", since = "1.4.0")] + pub fn append(&mut self, other: &mut Self) { + unsafe { + self.append_elements(other.as_slice() as _); + other.set_len(0); + } + } + + /// Appends elements to `self` from other buffer. + #[cfg(not(no_global_oom_handling))] + #[inline] + unsafe fn append_elements(&mut self, other: *const [T]) { + let count = unsafe { (*other).len() }; + self.reserve(count); + let len = self.len(); + unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; + self.len += count; + } + + /// Tries to append elements to `self` from other buffer. + #[inline] + unsafe fn try_append_elements(&mut self, other: *const [T]) -> Result<(), TryReserveError> { + let count = unsafe { (*other).len() }; + self.try_reserve(count)?; + let len = self.len(); + unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; + self.len += count; + Ok(()) + } + + /// Removes the specified range from the vector in bulk, returning all + /// removed elements as an iterator. If the iterator is dropped before + /// being fully consumed, it drops the remaining removed elements. + /// + /// The returned iterator keeps a mutable borrow on the vector to optimize + /// its implementation. + /// + /// # Panics + /// + /// Panics if the starting point is greater than the end point or if + /// the end point is greater than the length of the vector. + /// + /// # Leaking + /// + /// If the returned iterator goes out of scope without being dropped (due to + /// [`mem::forget`], for example), the vector may have lost and leaked + /// elements arbitrarily, including elements outside the range. + /// + /// # Examples + /// + /// ``` + /// let mut v = vec![1, 2, 3]; + /// let u: Vec<_> = v.drain(1..).collect(); + /// assert_eq!(v, &[1]); + /// assert_eq!(u, &[2, 3]); + /// + /// // A full range clears the vector, like `clear()` does + /// v.drain(..); + /// assert_eq!(v, &[]); + /// ``` + #[stable(feature = "drain", since = "1.6.0")] + pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A> + where + R: RangeBounds<usize>, + { + // Memory safety + // + // When the Drain is first created, it shortens the length of + // the source vector to make sure no uninitialized or moved-from elements + // are accessible at all if the Drain's destructor never gets to run. + // + // Drain will ptr::read out the values to remove. + // When finished, remaining tail of the vec is copied back to cover + // the hole, and the vector length is restored to the new length. + // + let len = self.len(); + let Range { start, end } = slice::range(range, ..len); + + unsafe { + // set self.vec length's to start, to be safe in case Drain is leaked + self.set_len(start); + let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start); + Drain { + tail_start: end, + tail_len: len - end, + iter: range_slice.iter(), + vec: NonNull::from(self), + } + } + } + + /// Clears the vector, removing all values. + /// + /// Note that this method has no effect on the allocated capacity + /// of the vector. 
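+ // Sketch (illustrative, not upstream code) of the "Leaking" behaviour
+ // documented for `drain` above: because the length is set to `start` up
+ // front, forgetting the iterator leaks the drained and tail elements but
+ // never exposes uninitialized memory.
+ //
+ //     let mut v = vec![1, 2, 3];
+ //     let d = v.drain(1..);
+ //     // Skipping `d`'s destructor means the tail is never copied back.
+ //     core::mem::forget(d);
+ //     // The vector is still valid but only contains the head; 2 and 3 leaked.
+ //     assert_eq!(v, [1]);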
+ /// + /// # Examples + /// + /// ``` + /// let mut v = vec![1, 2, 3]; + /// + /// v.clear(); + /// + /// assert!(v.is_empty()); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn clear(&mut self) { + let elems: *mut [T] = self.as_mut_slice(); + + // SAFETY: + // - `elems` comes directly from `as_mut_slice` and is therefore valid. + // - Setting `self.len` before calling `drop_in_place` means that, + // if an element's `Drop` impl panics, the vector's `Drop` impl will + // do nothing (leaking the rest of the elements) instead of dropping + // some twice. + unsafe { + self.len = 0; + ptr::drop_in_place(elems); + } + } + + /// Returns the number of elements in the vector, also referred to + /// as its 'length'. + /// + /// # Examples + /// + /// ``` + /// let a = vec![1, 2, 3]; + /// assert_eq!(a.len(), 3); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn len(&self) -> usize { + self.len + } + + /// Returns `true` if the vector contains no elements. + /// + /// # Examples + /// + /// ``` + /// let mut v = Vec::new(); + /// assert!(v.is_empty()); + /// + /// v.push(1); + /// assert!(!v.is_empty()); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Splits the collection into two at the given index. + /// + /// Returns a newly allocated vector containing the elements in the range + /// `[at, len)`. After the call, the original vector will be left containing + /// the elements `[0, at)` with its previous capacity unchanged. + /// + /// # Panics + /// + /// Panics if `at > len`. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// let vec2 = vec.split_off(1); + /// assert_eq!(vec, [1]); + /// assert_eq!(vec2, [2, 3]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[inline] + #[must_use = "use `.truncate()` if you don't need the other half"] + #[stable(feature = "split_off", since = "1.4.0")] + pub fn split_off(&mut self, at: usize) -> Self + where + A: Clone, + { + #[cold] + #[inline(never)] + fn assert_failed(at: usize, len: usize) -> ! { + panic!("`at` split index (is {at}) should be <= len (is {len})"); + } + + if at > self.len() { + assert_failed(at, self.len()); + } + + if at == 0 { + // the new vector can take over the original buffer and avoid the copy + return mem::replace( + self, + Vec::with_capacity_in(self.capacity(), self.allocator().clone()), + ); + } + + let other_len = self.len - at; + let mut other = Vec::with_capacity_in(other_len, self.allocator().clone()); + + // Unsafely `set_len` and copy items to `other`. + unsafe { + self.set_len(at); + other.set_len(other_len); + + ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len()); + } + other + } + + /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. + /// + /// If `new_len` is greater than `len`, the `Vec` is extended by the + /// difference, with each additional slot filled with the result of + /// calling the closure `f`. The return values from `f` will end up + /// in the `Vec` in the order they have been generated. + /// + /// If `new_len` is less than `len`, the `Vec` is simply truncated. + /// + /// This method uses a closure to create new values on every push. If + /// you'd rather [`Clone`] a given value, use [`Vec::resize`]. If you + /// want to use the [`Default`] trait to generate values, you can + /// pass [`Default::default`] as the second argument. 
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 3]; + /// vec.resize_with(5, Default::default); + /// assert_eq!(vec, [1, 2, 3, 0, 0]); + /// + /// let mut vec = vec![]; + /// let mut p = 1; + /// vec.resize_with(4, || { p *= 2; p }); + /// assert_eq!(vec, [2, 4, 8, 16]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "vec_resize_with", since = "1.33.0")] + pub fn resize_with<F>(&mut self, new_len: usize, f: F) + where + F: FnMut() -> T, + { + let len = self.len(); + if new_len > len { + self.extend_trusted(iter::repeat_with(f).take(new_len - len)); + } else { + self.truncate(new_len); + } + } + + /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, + /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime + /// `'a`. If the type has only static references, or none at all, then this + /// may be chosen to be `'static`. + /// + /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`, + /// so the leaked allocation may include unused capacity that is not part + /// of the returned slice. + /// + /// This function is mainly useful for data that lives for the remainder of + /// the program's life. Dropping the returned reference will cause a memory + /// leak. + /// + /// # Examples + /// + /// Simple usage: + /// + /// ``` + /// let x = vec![1, 2, 3]; + /// let static_ref: &'static mut [usize] = x.leak(); + /// static_ref[0] += 1; + /// assert_eq!(static_ref, &[2, 2, 3]); + /// ``` + #[stable(feature = "vec_leak", since = "1.47.0")] + #[inline] + pub fn leak<'a>(self) -> &'a mut [T] + where + A: 'a, + { + let mut me = ManuallyDrop::new(self); + unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) } + } + + /// Returns the remaining spare capacity of the vector as a slice of + /// `MaybeUninit<T>`. + /// + /// The returned slice can be used to fill the vector with data (e.g. by + /// reading from a file) before marking the data as initialized using the + /// [`set_len`] method. + /// + /// [`set_len`]: Vec::set_len + /// + /// # Examples + /// + /// ``` + /// // Allocate vector big enough for 10 elements. + /// let mut v = Vec::with_capacity(10); + /// + /// // Fill in the first 3 elements. + /// let uninit = v.spare_capacity_mut(); + /// uninit[0].write(0); + /// uninit[1].write(1); + /// uninit[2].write(2); + /// + /// // Mark the first 3 elements of the vector as being initialized. + /// unsafe { + /// v.set_len(3); + /// } + /// + /// assert_eq!(&v, &[0, 1, 2]); + /// ``` + #[stable(feature = "vec_spare_capacity", since = "1.60.0")] + #[inline] + pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] { + // Note: + // This method is not implemented in terms of `split_at_spare_mut`, + // to prevent invalidation of pointers to the buffer. + unsafe { + slice::from_raw_parts_mut( + self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>, + self.buf.capacity() - self.len, + ) + } + } + + /// Returns vector content as a slice of `T`, along with the remaining spare + /// capacity of the vector as a slice of `MaybeUninit<T>`. + /// + /// The returned spare capacity slice can be used to fill the vector with data + /// (e.g. by reading from a file) before marking the data as initialized using + /// the [`set_len`] method. + /// + /// [`set_len`]: Vec::set_len + /// + /// Note that this is a low-level API, which should be used with care for + /// optimization purposes. 
If you need to append data to a `Vec` + /// you can use [`push`], [`extend`], [`extend_from_slice`], + /// [`extend_from_within`], [`insert`], [`append`], [`resize`] or + /// [`resize_with`], depending on your exact needs. + /// + /// [`push`]: Vec::push + /// [`extend`]: Vec::extend + /// [`extend_from_slice`]: Vec::extend_from_slice + /// [`extend_from_within`]: Vec::extend_from_within + /// [`insert`]: Vec::insert + /// [`append`]: Vec::append + /// [`resize`]: Vec::resize + /// [`resize_with`]: Vec::resize_with + /// + /// # Examples + /// + /// ``` + /// #![feature(vec_split_at_spare)] + /// + /// let mut v = vec![1, 1, 2]; + /// + /// // Reserve additional space big enough for 10 elements. + /// v.reserve(10); + /// + /// let (init, uninit) = v.split_at_spare_mut(); + /// let sum = init.iter().copied().sum::<u32>(); + /// + /// // Fill in the next 4 elements. + /// uninit[0].write(sum); + /// uninit[1].write(sum * 2); + /// uninit[2].write(sum * 3); + /// uninit[3].write(sum * 4); + /// + /// // Mark the 4 elements of the vector as being initialized. + /// unsafe { + /// let len = v.len(); + /// v.set_len(len + 4); + /// } + /// + /// assert_eq!(&v, &[1, 1, 2, 4, 8, 12, 16]); + /// ``` + #[unstable(feature = "vec_split_at_spare", issue = "81944")] + #[inline] + pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) { + // SAFETY: + // - len is ignored and so never changed + let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() }; + (init, spare) + } + + /// Safety: changing returned .2 (&mut usize) is considered the same as calling `.set_len(_)`. + /// + /// This method provides unique access to all vec parts at once in `extend_from_within`. + unsafe fn split_at_spare_mut_with_len( + &mut self, + ) -> (&mut [T], &mut [MaybeUninit<T>], &mut usize) { + let ptr = self.as_mut_ptr(); + // SAFETY: + // - `ptr` is guaranteed to be valid for `self.len` elements + // - but the allocation extends out to `self.buf.capacity()` elements, possibly + // uninitialized + let spare_ptr = unsafe { ptr.add(self.len) }; + let spare_ptr = spare_ptr.cast::<MaybeUninit<T>>(); + let spare_len = self.buf.capacity() - self.len; + + // SAFETY: + // - `ptr` is guaranteed to be valid for `self.len` elements + // - `spare_ptr` is pointing one element past the buffer, so it doesn't overlap with `initialized` + unsafe { + let initialized = slice::from_raw_parts_mut(ptr, self.len); + let spare = slice::from_raw_parts_mut(spare_ptr, spare_len); + + (initialized, spare, &mut self.len) + } + } +} + +impl<T: Clone, A: Allocator> Vec<T, A> { + /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. + /// + /// If `new_len` is greater than `len`, the `Vec` is extended by the + /// difference, with each additional slot filled with `value`. + /// If `new_len` is less than `len`, the `Vec` is simply truncated. + /// + /// This method requires `T` to implement [`Clone`], + /// in order to be able to clone the passed value. + /// If you need more flexibility (or want to rely on [`Default`] instead of + /// [`Clone`]), use [`Vec::resize_with`]. + /// If you only need to resize to a smaller size, use [`Vec::truncate`]. 
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec!["hello"]; + /// vec.resize(3, "world"); + /// assert_eq!(vec, ["hello", "world", "world"]); + /// + /// let mut vec = vec![1, 2, 3, 4]; + /// vec.resize(2, 0); + /// assert_eq!(vec, [1, 2]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "vec_resize", since = "1.5.0")] + pub fn resize(&mut self, new_len: usize, value: T) { + let len = self.len(); + + if new_len > len { + self.extend_with(new_len - len, ExtendElement(value)) + } else { + self.truncate(new_len); + } + } + + /// Tries to resize the `Vec` in-place so that `len` is equal to `new_len`. + /// + /// If `new_len` is greater than `len`, the `Vec` is extended by the + /// difference, with each additional slot filled with `value`. + /// If `new_len` is less than `len`, the `Vec` is simply truncated. + /// + /// This method requires `T` to implement [`Clone`], + /// in order to be able to clone the passed value. + /// If you need more flexibility (or want to rely on [`Default`] instead of + /// [`Clone`]), use [`Vec::resize_with`]. + /// If you only need to resize to a smaller size, use [`Vec::truncate`]. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec!["hello"]; + /// vec.try_resize(3, "world").unwrap(); + /// assert_eq!(vec, ["hello", "world", "world"]); + /// + /// let mut vec = vec![1, 2, 3, 4]; + /// vec.try_resize(2, 0).unwrap(); + /// assert_eq!(vec, [1, 2]); + /// + /// let mut vec = vec![42]; + /// let result = vec.try_resize(usize::MAX, 0); + /// assert!(result.is_err()); + /// ``` + #[stable(feature = "kernel", since = "1.0.0")] + pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError> { + let len = self.len(); + + if new_len > len { + self.try_extend_with(new_len - len, ExtendElement(value)) + } else { + self.truncate(new_len); + Ok(()) + } + } + + /// Clones and appends all elements in a slice to the `Vec`. + /// + /// Iterates over the slice `other`, clones each element, and then appends + /// it to this `Vec`. The `other` slice is traversed in-order. + /// + /// Note that this function is same as [`extend`] except that it is + /// specialized to work with slices instead. If and when Rust gets + /// specialization this function will likely be deprecated (but still + /// available). + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1]; + /// vec.extend_from_slice(&[2, 3, 4]); + /// assert_eq!(vec, [1, 2, 3, 4]); + /// ``` + /// + /// [`extend`]: Vec::extend + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "vec_extend_from_slice", since = "1.6.0")] + pub fn extend_from_slice(&mut self, other: &[T]) { + self.spec_extend(other.iter()) + } + + /// Tries to clone and append all elements in a slice to the `Vec`. + /// + /// Iterates over the slice `other`, clones each element, and then appends + /// it to this `Vec`. The `other` slice is traversed in-order. + /// + /// Note that this function is same as [`extend`] except that it is + /// specialized to work with slices instead. If and when Rust gets + /// specialization this function will likely be deprecated (but still + /// available). 
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1]; + /// vec.try_extend_from_slice(&[2, 3, 4]).unwrap(); + /// assert_eq!(vec, [1, 2, 3, 4]); + /// ``` + /// + /// [`extend`]: Vec::extend + #[stable(feature = "kernel", since = "1.0.0")] + pub fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), TryReserveError> { + self.try_spec_extend(other.iter()) + } + + /// Copies elements from `src` range to the end of the vector. + /// + /// # Panics + /// + /// Panics if the starting point is greater than the end point or if + /// the end point is greater than the length of the vector. + /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![0, 1, 2, 3, 4]; + /// + /// vec.extend_from_within(2..); + /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4]); + /// + /// vec.extend_from_within(..2); + /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1]); + /// + /// vec.extend_from_within(4..8); + /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1, 4, 2, 3, 4]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[stable(feature = "vec_extend_from_within", since = "1.53.0")] + pub fn extend_from_within<R>(&mut self, src: R) + where + R: RangeBounds<usize>, + { + let range = slice::range(src, ..self.len()); + self.reserve(range.len()); + + // SAFETY: + // - `slice::range` guarantees that the given range is valid for indexing self + unsafe { + self.spec_extend_from_within(range); + } + } +} + +impl<T, A: Allocator, const N: usize> Vec<[T; N], A> { + /// Takes a `Vec<[T; N]>` and flattens it into a `Vec<T>`. + /// + /// # Panics + /// + /// Panics if the length of the resulting vector would overflow a `usize`. + /// + /// This is only possible when flattening a vector of arrays of zero-sized + /// types, and thus tends to be irrelevant in practice. If + /// `size_of::<T>() > 0`, this will never panic. + /// + /// # Examples + /// + /// ``` + /// #![feature(slice_flatten)] + /// + /// let mut vec = vec![[1, 2, 3], [4, 5, 6], [7, 8, 9]]; + /// assert_eq!(vec.pop(), Some([7, 8, 9])); + /// + /// let mut flattened = vec.into_flattened(); + /// assert_eq!(flattened.pop(), Some(6)); + /// ``` + #[unstable(feature = "slice_flatten", issue = "95629")] + pub fn into_flattened(self) -> Vec<T, A> { + let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc(); + let (new_len, new_cap) = if T::IS_ZST { + (len.checked_mul(N).expect("vec len overflow"), usize::MAX) + } else { + // SAFETY: + // - `cap * N` cannot overflow because the allocation is already in + // the address space. + // - Each `[T; N]` has `N` valid elements, so there are `len * N` + // valid elements in the allocation. + unsafe { (len.unchecked_mul(N), cap.unchecked_mul(N)) } + }; + // SAFETY: + // - `ptr` was allocated by `self` + // - `ptr` is well-aligned because `[T; N]` has the same alignment as `T`. + // - `new_cap` refers to the same sized allocation as `cap` because + // `new_cap * size_of::<T>()` == `cap * size_of::<[T; N]>()` + // - `len` <= `cap`, so `len * N` <= `cap * N`. + unsafe { Vec::<T, A>::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) } + } +} + +// This code generalizes `extend_with_{element,default}`. +trait ExtendWith<T> { + fn next(&mut self) -> T; + fn last(self) -> T; +} + +struct ExtendElement<T>(T); +impl<T: Clone> ExtendWith<T> for ExtendElement<T> { + fn next(&mut self) -> T { + self.0.clone() + } + fn last(self) -> T { + self.0 + } +} + +impl<T, A: Allocator> Vec<T, A> { + #[cfg(not(no_global_oom_handling))] + /// Extend the vector by `n` values, using the given generator. 
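+ //
+ // With `ExtendElement(value)`, `next()` returns `value.clone()` for each new
+ // slot and `last()` moves `value` itself into the final slot, so extending by
+ // `n` elements performs only `n - 1` clones. The `SetLenOnDrop` guard keeps
+ // `self.len` in sync even if one of those clones panics partway through.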
+ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) { + self.reserve(n); + + unsafe { + let mut ptr = self.as_mut_ptr().add(self.len()); + // Use SetLenOnDrop to work around bug where compiler + // might not realize the store through `ptr` through self.set_len() + // don't alias. + let mut local_len = SetLenOnDrop::new(&mut self.len); + + // Write all elements except the last one + for _ in 1..n { + ptr::write(ptr, value.next()); + ptr = ptr.add(1); + // Increment the length in every step in case next() panics + local_len.increment_len(1); + } + + if n > 0 { + // We can write the last element directly without cloning needlessly + ptr::write(ptr, value.last()); + local_len.increment_len(1); + } + + // len set by scope guard + } + } + + /// Try to extend the vector by `n` values, using the given generator. + fn try_extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) -> Result<(), TryReserveError> { + self.try_reserve(n)?; + + unsafe { + let mut ptr = self.as_mut_ptr().add(self.len()); + // Use SetLenOnDrop to work around bug where compiler + // might not realize the store through `ptr` through self.set_len() + // don't alias. + let mut local_len = SetLenOnDrop::new(&mut self.len); + + // Write all elements except the last one + for _ in 1..n { + ptr::write(ptr, value.next()); + ptr = ptr.add(1); + // Increment the length in every step in case next() panics + local_len.increment_len(1); + } + + if n > 0 { + // We can write the last element directly without cloning needlessly + ptr::write(ptr, value.last()); + local_len.increment_len(1); + } + + // len set by scope guard + Ok(()) + } + } +} + +impl<T: PartialEq, A: Allocator> Vec<T, A> { + /// Removes consecutive repeated elements in the vector according to the + /// [`PartialEq`] trait implementation. + /// + /// If the vector is sorted, this removes all duplicates. 
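+ ///
+ /// For example (an illustrative sketch), sorting first removes every
+ /// duplicate, not just consecutive ones:
+ ///
+ /// ```
+ /// let mut v = vec![3, 1, 2, 3, 1];
+ /// v.sort();
+ /// v.dedup();
+ /// assert_eq!(v, [1, 2, 3]);
+ /// ```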
+ /// + /// # Examples + /// + /// ``` + /// let mut vec = vec![1, 2, 2, 3, 2]; + /// + /// vec.dedup(); + /// + /// assert_eq!(vec, [1, 2, 3, 2]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + #[inline] + pub fn dedup(&mut self) { + self.dedup_by(|a, b| a == b) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Internal methods and functions +//////////////////////////////////////////////////////////////////////////////// + +#[doc(hidden)] +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "rust1", since = "1.0.0")] +pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> { + <T as SpecFromElem>::from_elem(elem, n, Global) +} + +#[doc(hidden)] +#[cfg(not(no_global_oom_handling))] +#[unstable(feature = "allocator_api", issue = "32838")] +pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> { + <T as SpecFromElem>::from_elem(elem, n, alloc) +} + +trait ExtendFromWithinSpec { + /// # Safety + /// + /// - `src` needs to be valid index + /// - `self.capacity() - self.len()` must be `>= src.len()` + unsafe fn spec_extend_from_within(&mut self, src: Range<usize>); +} + +impl<T: Clone, A: Allocator> ExtendFromWithinSpec for Vec<T, A> { + default unsafe fn spec_extend_from_within(&mut self, src: Range<usize>) { + // SAFETY: + // - len is increased only after initializing elements + let (this, spare, len) = unsafe { self.split_at_spare_mut_with_len() }; + + // SAFETY: + // - caller guarantees that src is a valid index + let to_clone = unsafe { this.get_unchecked(src) }; + + iter::zip(to_clone, spare) + .map(|(src, dst)| dst.write(src.clone())) + // Note: + // - Element was just initialized with `MaybeUninit::write`, so it's ok to increase len + // - len is increased after each element to prevent leaks (see issue #82533) + .for_each(|_| *len += 1); + } +} + +impl<T: Copy, A: Allocator> ExtendFromWithinSpec for Vec<T, A> { + unsafe fn spec_extend_from_within(&mut self, src: Range<usize>) { + let count = src.len(); + { + let (init, spare) = self.split_at_spare_mut(); + + // SAFETY: + // - caller guarantees that `src` is a valid index + let source = unsafe { init.get_unchecked(src) }; + + // SAFETY: + // - Both pointers are created from unique slice references (`&mut [_]`) + // so they are valid and do not overlap. 
+ // - Elements are :Copy so it's OK to copy them, without doing + // anything with the original values + // - `count` is equal to the len of `source`, so source is valid for + // `count` reads + // - `.reserve(count)` guarantees that `spare.len() >= count` so spare + // is valid for `count` writes + unsafe { ptr::copy_nonoverlapping(source.as_ptr(), spare.as_mut_ptr() as _, count) }; + } + + // SAFETY: + // - The elements were just initialized by `copy_nonoverlapping` + self.len += count; + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Common trait implementations for Vec +//////////////////////////////////////////////////////////////////////////////// + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> ops::Deref for Vec<T, A> { + type Target = [T]; + + #[inline] + fn deref(&self) -> &[T] { + unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> ops::DerefMut for Vec<T, A> { + #[inline] + fn deref_mut(&mut self) -> &mut [T] { + unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "rust1", since = "1.0.0")] +impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> { + #[cfg(not(test))] + fn clone(&self) -> Self { + let alloc = self.allocator().clone(); + <[T]>::to_vec_in(&**self, alloc) + } + + // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is + // required for this method definition, is not available. Instead use the + // `slice::to_vec` function which is only available with cfg(test) + // NB see the slice::hack module in slice.rs for more information + #[cfg(test)] + fn clone(&self) -> Self { + let alloc = self.allocator().clone(); + crate::slice::to_vec(&**self, alloc) + } + + fn clone_from(&mut self, other: &Self) { + crate::slice::SpecCloneIntoVec::clone_into(other.as_slice(), self); + } +} + +/// The hash of a vector is the same as that of the corresponding slice, +/// as required by the `core::borrow::Borrow` implementation. 
+/// +/// ``` +/// use std::hash::BuildHasher; +/// +/// let b = std::collections::hash_map::RandomState::new(); +/// let v: Vec<u8> = vec![0xa8, 0x3c, 0x09]; +/// let s: &[u8] = &[0xa8, 0x3c, 0x09]; +/// assert_eq!(b.hash_one(v), b.hash_one(s)); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +impl<T: Hash, A: Allocator> Hash for Vec<T, A> { + #[inline] + fn hash<H: Hasher>(&self, state: &mut H) { + Hash::hash(&**self, state) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented( + message = "vector indices are of type `usize` or ranges of `usize`", + label = "vector indices are of type `usize` or ranges of `usize`" +)] +impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> { + type Output = I::Output; + + #[inline] + fn index(&self, index: I) -> &Self::Output { + Index::index(&**self, index) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented( + message = "vector indices are of type `usize` or ranges of `usize`", + label = "vector indices are of type `usize` or ranges of `usize`" +)] +impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> { + #[inline] + fn index_mut(&mut self, index: I) -> &mut Self::Output { + IndexMut::index_mut(&mut **self, index) + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "rust1", since = "1.0.0")] +impl<T> FromIterator<T> for Vec<T> { + #[inline] + fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> { + <Self as SpecFromIter<T, I::IntoIter>>::from_iter(iter.into_iter()) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> IntoIterator for Vec<T, A> { + type Item = T; + type IntoIter = IntoIter<T, A>; + + /// Creates a consuming iterator, that is, one that moves each value out of + /// the vector (from start to end). The vector cannot be used after calling + /// this. 
+ /// + /// # Examples + /// + /// ``` + /// let v = vec!["a".to_string(), "b".to_string()]; + /// let mut v_iter = v.into_iter(); + /// + /// let first_element: Option<String> = v_iter.next(); + /// + /// assert_eq!(first_element, Some("a".to_string())); + /// assert_eq!(v_iter.next(), Some("b".to_string())); + /// assert_eq!(v_iter.next(), None); + /// ``` + #[inline] + fn into_iter(self) -> Self::IntoIter { + unsafe { + let mut me = ManuallyDrop::new(self); + let alloc = ManuallyDrop::new(ptr::read(me.allocator())); + let begin = me.as_mut_ptr(); + let end = if T::IS_ZST { + begin.wrapping_byte_add(me.len()) + } else { + begin.add(me.len()) as *const T + }; + let cap = me.buf.capacity(); + IntoIter { + buf: NonNull::new_unchecked(begin), + phantom: PhantomData, + cap, + alloc, + ptr: begin, + end, + } + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, T, A: Allocator> IntoIterator for &'a Vec<T, A> { + type Item = &'a T; + type IntoIter = slice::Iter<'a, T>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> { + type Item = &'a mut T; + type IntoIter = slice::IterMut<'a, T>; + + fn into_iter(self) -> Self::IntoIter { + self.iter_mut() + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> Extend<T> for Vec<T, A> { + #[inline] + fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) { + <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter()) + } + + #[inline] + fn extend_one(&mut self, item: T) { + self.push(item); + } + + #[inline] + fn extend_reserve(&mut self, additional: usize) { + self.reserve(additional); + } +} + +impl<T, A: Allocator> Vec<T, A> { + // leaf method to which various SpecFrom/SpecExtend implementations delegate when + // they have no further optimizations to apply + #[cfg(not(no_global_oom_handling))] + fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) { + // This is the case for a general iterator. + // + // This function should be the moral equivalent of: + // + // for item in iterator { + // self.push(item); + // } + while let Some(element) = iterator.next() { + let len = self.len(); + if len == self.capacity() { + let (lower, _) = iterator.size_hint(); + self.reserve(lower.saturating_add(1)); + } + unsafe { + ptr::write(self.as_mut_ptr().add(len), element); + // Since next() executes user code which can panic we have to bump the length + // after each step. + // NB can't overflow since we would have had to alloc the address space + self.set_len(len + 1); + } + } + } + + // leaf method to which various SpecFrom/SpecExtend implementations delegate when + // they have no further optimizations to apply + fn try_extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) -> Result<(), TryReserveError> { + // This is the case for a general iterator. + // + // This function should be the moral equivalent of: + // + // for item in iterator { + // self.push(item); + // } + while let Some(element) = iterator.next() { + let len = self.len(); + if len == self.capacity() { + let (lower, _) = iterator.size_hint(); + self.try_reserve(lower.saturating_add(1))?; + } + unsafe { + ptr::write(self.as_mut_ptr().add(len), element); + // Since next() executes user code which can panic we have to bump the length + // after each step. 
+ // NB can't overflow since we would have had to alloc the address space + self.set_len(len + 1); + } + } + + Ok(()) + } + + // specific extend for `TrustedLen` iterators, called both by the specializations + // and internal places where resolving specialization makes compilation slower + #[cfg(not(no_global_oom_handling))] + fn extend_trusted(&mut self, iterator: impl iter::TrustedLen<Item = T>) { + let (low, high) = iterator.size_hint(); + if let Some(additional) = high { + debug_assert_eq!( + low, + additional, + "TrustedLen iterator's size hint is not exact: {:?}", + (low, high) + ); + self.reserve(additional); + unsafe { + let ptr = self.as_mut_ptr(); + let mut local_len = SetLenOnDrop::new(&mut self.len); + iterator.for_each(move |element| { + ptr::write(ptr.add(local_len.current_len()), element); + // Since the loop executes user code which can panic we have to update + // the length every step to correctly drop what we've written. + // NB can't overflow since we would have had to alloc the address space + local_len.increment_len(1); + }); + } + } else { + // Per TrustedLen contract a `None` upper bound means that the iterator length + // truly exceeds usize::MAX, which would eventually lead to a capacity overflow anyway. + // Since the other branch already panics eagerly (via `reserve()`) we do the same here. + // This avoids additional codegen for a fallback code path which would eventually + // panic anyway. + panic!("capacity overflow"); + } + } + + // specific extend for `TrustedLen` iterators, called both by the specializations + // and internal places where resolving specialization makes compilation slower + fn try_extend_trusted(&mut self, iterator: impl iter::TrustedLen<Item = T>) -> Result<(), TryReserveError> { + let (low, high) = iterator.size_hint(); + if let Some(additional) = high { + debug_assert_eq!( + low, + additional, + "TrustedLen iterator's size hint is not exact: {:?}", + (low, high) + ); + self.try_reserve(additional)?; + unsafe { + let ptr = self.as_mut_ptr(); + let mut local_len = SetLenOnDrop::new(&mut self.len); + iterator.for_each(move |element| { + ptr::write(ptr.add(local_len.current_len()), element); + // Since the loop executes user code which can panic we have to update + // the length every step to correctly drop what we've written. + // NB can't overflow since we would have had to alloc the address space + local_len.increment_len(1); + }); + } + Ok(()) + } else { + Err(TryReserveErrorKind::CapacityOverflow.into()) + } + } + + /// Creates a splicing iterator that replaces the specified range in the vector + /// with the given `replace_with` iterator and yields the removed items. + /// `replace_with` does not need to be the same length as `range`. + /// + /// `range` is removed even if the iterator is not consumed until the end. + /// + /// It is unspecified how many elements are removed from the vector + /// if the `Splice` value is leaked. + /// + /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped. + /// + /// This is optimal if: + /// + /// * The tail (elements in the vector after `range`) is empty, + /// * or `replace_with` yields fewer or equal elements than `range`’s length + /// * or the lower bound of its `size_hint()` is exact. + /// + /// Otherwise, a temporary vector is allocated and the tail is moved twice. + /// + /// # Panics + /// + /// Panics if the starting point is greater than the end point or if + /// the end point is greater than the length of the vector. 
+ /// + /// # Examples + /// + /// ``` + /// let mut v = vec![1, 2, 3, 4]; + /// let new = [7, 8, 9]; + /// let u: Vec<_> = v.splice(1..3, new).collect(); + /// assert_eq!(v, &[1, 7, 8, 9, 4]); + /// assert_eq!(u, &[2, 3]); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[inline] + #[stable(feature = "vec_splice", since = "1.21.0")] + pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A> + where + R: RangeBounds<usize>, + I: IntoIterator<Item = T>, + { + Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } + } + + /// Creates an iterator which uses a closure to determine if an element should be removed. + /// + /// If the closure returns true, then the element is removed and yielded. + /// If the closure returns false, the element will remain in the vector and will not be yielded + /// by the iterator. + /// + /// Using this method is equivalent to the following code: + /// + /// ``` + /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 }; + /// # let mut vec = vec![1, 2, 3, 4, 5, 6]; + /// let mut i = 0; + /// while i < vec.len() { + /// if some_predicate(&mut vec[i]) { + /// let val = vec.remove(i); + /// // your code here + /// } else { + /// i += 1; + /// } + /// } + /// + /// # assert_eq!(vec, vec![1, 4, 5]); + /// ``` + /// + /// But `drain_filter` is easier to use. `drain_filter` is also more efficient, + /// because it can backshift the elements of the array in bulk. + /// + /// Note that `drain_filter` also lets you mutate every element in the filter closure, + /// regardless of whether you choose to keep or remove it. + /// + /// # Examples + /// + /// Splitting an array into evens and odds, reusing the original allocation: + /// + /// ``` + /// #![feature(drain_filter)] + /// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]; + /// + /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>(); + /// let odds = numbers; + /// + /// assert_eq!(evens, vec![2, 4, 6, 8, 14]); + /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); + /// ``` + #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] + pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A> + where + F: FnMut(&mut T) -> bool, + { + let old_len = self.len(); + + // Guard against us getting leaked (leak amplification) + unsafe { + self.set_len(0); + } + + DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } + } +} + +/// Extend implementation that copies elements out of references before pushing them onto the Vec. +/// +/// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to +/// append the entire slice at once. +/// +/// [`copy_from_slice`]: slice::copy_from_slice +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "extend_ref", since = "1.2.0")] +impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> { + fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) { + self.spec_extend(iter.into_iter()) + } + + #[inline] + fn extend_one(&mut self, &item: &'a T) { + self.push(item); + } + + #[inline] + fn extend_reserve(&mut self, additional: usize) { + self.reserve(additional); + } +} + +/// Implements comparison of vectors, [lexicographically](Ord#lexicographical-comparison). 
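+///
+/// For example (an illustrative sketch), elements are compared left to right,
+/// and a shorter vector that is a prefix of a longer one compares as less:
+///
+/// ```
+/// assert!(vec![1, 2, 3] < vec![1, 3]);
+/// assert!(vec![1, 2] < vec![1, 2, 3]);
+/// ```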
+#[stable(feature = "rust1", since = "1.0.0")] +impl<T: PartialOrd, A: Allocator> PartialOrd for Vec<T, A> { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + PartialOrd::partial_cmp(&**self, &**other) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T: Eq, A: Allocator> Eq for Vec<T, A> {} + +/// Implements ordering of vectors, [lexicographically](Ord#lexicographical-comparison). +#[stable(feature = "rust1", since = "1.0.0")] +impl<T: Ord, A: Allocator> Ord for Vec<T, A> { + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + Ord::cmp(&**self, &**other) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec<T, A> { + fn drop(&mut self) { + unsafe { + // use drop for [T] + // use a raw slice to refer to the elements of the vector as weakest necessary type; + // could avoid questions of validity in certain cases + ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len)) + } + // RawVec handles deallocation + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T> Default for Vec<T> { + /// Creates an empty `Vec<T>`. + /// + /// The vector will not allocate until elements are pushed onto it. + fn default() -> Vec<T> { + Vec::new() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&**self, f) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> { + fn as_ref(&self) -> &Vec<T, A> { + self + } +} + +#[stable(feature = "vec_as_mut", since = "1.5.0")] +impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> { + fn as_mut(&mut self) -> &mut Vec<T, A> { + self + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> { + fn as_ref(&self) -> &[T] { + self + } +} + +#[stable(feature = "vec_as_mut", since = "1.5.0")] +impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> { + fn as_mut(&mut self) -> &mut [T] { + self + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "rust1", since = "1.0.0")] +impl<T: Clone> From<&[T]> for Vec<T> { + /// Allocate a `Vec<T>` and fill it by cloning `s`'s items. + /// + /// # Examples + /// + /// ``` + /// assert_eq!(Vec::from(&[1, 2, 3][..]), vec![1, 2, 3]); + /// ``` + #[cfg(not(test))] + fn from(s: &[T]) -> Vec<T> { + s.to_vec() + } + #[cfg(test)] + fn from(s: &[T]) -> Vec<T> { + crate::slice::to_vec(s, Global) + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "vec_from_mut", since = "1.19.0")] +impl<T: Clone> From<&mut [T]> for Vec<T> { + /// Allocate a `Vec<T>` and fill it by cloning `s`'s items. + /// + /// # Examples + /// + /// ``` + /// assert_eq!(Vec::from(&mut [1, 2, 3][..]), vec![1, 2, 3]); + /// ``` + #[cfg(not(test))] + fn from(s: &mut [T]) -> Vec<T> { + s.to_vec() + } + #[cfg(test)] + fn from(s: &mut [T]) -> Vec<T> { + crate::slice::to_vec(s, Global) + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "vec_from_array", since = "1.44.0")] +impl<T, const N: usize> From<[T; N]> for Vec<T> { + /// Allocate a `Vec<T>` and move `s`'s items into it. 
+ /// + /// # Examples + /// + /// ``` + /// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]); + /// ``` + #[cfg(not(test))] + fn from(s: [T; N]) -> Vec<T> { + <[T]>::into_vec(Box::new(s)) + } + + #[cfg(test)] + fn from(s: [T; N]) -> Vec<T> { + crate::slice::into_vec(Box::new(s)) + } +} + +#[cfg(not(no_borrow))] +#[stable(feature = "vec_from_cow_slice", since = "1.14.0")] +impl<'a, T> From<Cow<'a, [T]>> for Vec<T> +where + [T]: ToOwned<Owned = Vec<T>>, +{ + /// Convert a clone-on-write slice into a vector. + /// + /// If `s` already owns a `Vec<T>`, it will be returned directly. + /// If `s` is borrowing a slice, a new `Vec<T>` will be allocated and + /// filled by cloning `s`'s items into it. + /// + /// # Examples + /// + /// ``` + /// # use std::borrow::Cow; + /// let o: Cow<'_, [i32]> = Cow::Owned(vec![1, 2, 3]); + /// let b: Cow<'_, [i32]> = Cow::Borrowed(&[1, 2, 3]); + /// assert_eq!(Vec::from(o), Vec::from(b)); + /// ``` + fn from(s: Cow<'a, [T]>) -> Vec<T> { + s.into_owned() + } +} + +// note: test pulls in std, which causes errors here +#[cfg(not(test))] +#[stable(feature = "vec_from_box", since = "1.18.0")] +impl<T, A: Allocator> From<Box<[T], A>> for Vec<T, A> { + /// Convert a boxed slice into a vector by transferring ownership of + /// the existing heap allocation. + /// + /// # Examples + /// + /// ``` + /// let b: Box<[i32]> = vec![1, 2, 3].into_boxed_slice(); + /// assert_eq!(Vec::from(b), vec![1, 2, 3]); + /// ``` + fn from(s: Box<[T], A>) -> Self { + s.into_vec() + } +} + +// note: test pulls in std, which causes errors here +#[cfg(not(no_global_oom_handling))] +#[cfg(not(test))] +#[stable(feature = "box_from_vec", since = "1.20.0")] +impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> { + /// Convert a vector into a boxed slice. + /// + /// If `v` has excess capacity, its items will be moved into a + /// newly-allocated buffer with exactly the right capacity. + /// + /// # Examples + /// + /// ``` + /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice()); + /// ``` + /// + /// Any excess capacity is removed: + /// ``` + /// let mut vec = Vec::with_capacity(10); + /// vec.extend([1, 2, 3]); + /// + /// assert_eq!(Box::from(vec), vec![1, 2, 3].into_boxed_slice()); + /// ``` + fn from(v: Vec<T, A>) -> Self { + v.into_boxed_slice() + } +} + +#[cfg(not(no_global_oom_handling))] +#[stable(feature = "rust1", since = "1.0.0")] +impl From<&str> for Vec<u8> { + /// Allocate a `Vec<u8>` and fill it with a UTF-8 string. + /// + /// # Examples + /// + /// ``` + /// assert_eq!(Vec::from("123"), vec![b'1', b'2', b'3']); + /// ``` + fn from(s: &str) -> Vec<u8> { + From::from(s.as_bytes()) + } +} + +#[stable(feature = "array_try_from_vec", since = "1.48.0")] +impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] { + type Error = Vec<T, A>; + + /// Gets the entire contents of the `Vec<T>` as an array, + /// if its size exactly matches that of the requested array. + /// + /// # Examples + /// + /// ``` + /// assert_eq!(vec![1, 2, 3].try_into(), Ok([1, 2, 3])); + /// assert_eq!(<Vec<i32>>::new().try_into(), Ok([])); + /// ``` + /// + /// If the length doesn't match, the input comes back in `Err`: + /// ``` + /// let r: Result<[i32; 4], _> = (0..10).collect::<Vec<_>>().try_into(); + /// assert_eq!(r, Err(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9])); + /// ``` + /// + /// If you're fine with just getting a prefix of the `Vec<T>`, + /// you can call [`.truncate(N)`](Vec::truncate) first. 
+ /// ``` + /// let mut v = String::from("hello world").into_bytes(); + /// v.sort(); + /// v.truncate(2); + /// let [a, b]: [_; 2] = v.try_into().unwrap(); + /// assert_eq!(a, b' '); + /// assert_eq!(b, b'd'); + /// ``` + fn try_from(mut vec: Vec<T, A>) -> Result<[T; N], Vec<T, A>> { + if vec.len() != N { + return Err(vec); + } + + // SAFETY: `.set_len(0)` is always sound. + unsafe { vec.set_len(0) }; + + // SAFETY: A `Vec`'s pointer is always aligned properly, and + // the alignment the array needs is the same as the items. + // We checked earlier that we have sufficient items. + // The items will not double-drop as the `set_len` + // tells the `Vec` not to also drop them. + let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) }; + Ok(array) + } +} diff --git a/rust/alloc/vec/partial_eq.rs b/rust/alloc/vec/partial_eq.rs new file mode 100644 index 0000000000..10ad4e4922 --- /dev/null +++ b/rust/alloc/vec/partial_eq.rs @@ -0,0 +1,49 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use crate::alloc::Allocator; +#[cfg(not(no_global_oom_handling))] +use crate::borrow::Cow; + +use super::Vec; + +macro_rules! __impl_slice_eq1 { + ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => { + #[$stability] + impl<T, U, $($vars)*> PartialEq<$rhs> for $lhs + where + T: PartialEq<U>, + $($ty: $bound)? + { + #[inline] + fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] } + #[inline] + fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] } + } + } +} + +__impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec<T, A1>, Vec<U, A2>, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } +__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] } +__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] } +#[cfg(not(no_global_oom_handling))] +__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +#[cfg(not(no_global_oom_handling))] +__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +#[cfg(not(no_global_oom_handling))] +__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] } +__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] } + +// NOTE: some less important impls are omitted to reduce code bloat +// FIXME(Centril): Reconsider this? +//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], } +//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, } +//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, } +//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, } +//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], } +//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], } +//__impl_slice_eq1! 
{ [const N: usize] Cow<'a, [A]>, &mut [B; N], } diff --git a/rust/alloc/vec/set_len_on_drop.rs b/rust/alloc/vec/set_len_on_drop.rs new file mode 100644 index 0000000000..d3c7297b80 --- /dev/null +++ b/rust/alloc/vec/set_len_on_drop.rs @@ -0,0 +1,35 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +// Set the length of the vec when the `SetLenOnDrop` value goes out of scope. +// +// The idea is: The length field in SetLenOnDrop is a local variable +// that the optimizer will see does not alias with any stores through the Vec's data +// pointer. This is a workaround for alias analysis issue #32155 +pub(super) struct SetLenOnDrop<'a> { + len: &'a mut usize, + local_len: usize, +} + +impl<'a> SetLenOnDrop<'a> { + #[inline] + pub(super) fn new(len: &'a mut usize) -> Self { + SetLenOnDrop { local_len: *len, len } + } + + #[inline] + pub(super) fn increment_len(&mut self, increment: usize) { + self.local_len += increment; + } + + #[inline] + pub(super) fn current_len(&self) -> usize { + self.local_len + } +} + +impl Drop for SetLenOnDrop<'_> { + #[inline] + fn drop(&mut self) { + *self.len = self.local_len; + } +} diff --git a/rust/alloc/vec/spec_extend.rs b/rust/alloc/vec/spec_extend.rs new file mode 100644 index 0000000000..a6a735201e --- /dev/null +++ b/rust/alloc/vec/spec_extend.rs @@ -0,0 +1,119 @@ +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use crate::alloc::Allocator; +use crate::collections::TryReserveError; +use core::iter::TrustedLen; +use core::slice::{self}; + +use super::{IntoIter, Vec}; + +// Specialization trait used for Vec::extend +#[cfg(not(no_global_oom_handling))] +pub(super) trait SpecExtend<T, I> { + fn spec_extend(&mut self, iter: I); +} + +// Specialization trait used for Vec::try_extend +pub(super) trait TrySpecExtend<T, I> { + fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError>; +} + +#[cfg(not(no_global_oom_handling))] +impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A> +where + I: Iterator<Item = T>, +{ + default fn spec_extend(&mut self, iter: I) { + self.extend_desugared(iter) + } +} + +impl<T, I, A: Allocator> TrySpecExtend<T, I> for Vec<T, A> +where + I: Iterator<Item = T>, +{ + default fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError> { + self.try_extend_desugared(iter) + } +} + +#[cfg(not(no_global_oom_handling))] +impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A> +where + I: TrustedLen<Item = T>, +{ + default fn spec_extend(&mut self, iterator: I) { + self.extend_trusted(iterator) + } +} + +impl<T, I, A: Allocator> TrySpecExtend<T, I> for Vec<T, A> +where + I: TrustedLen<Item = T>, +{ + default fn try_spec_extend(&mut self, iterator: I) -> Result<(), TryReserveError> { + self.try_extend_trusted(iterator) + } +} + +#[cfg(not(no_global_oom_handling))] +impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> { + fn spec_extend(&mut self, mut iterator: IntoIter<T>) { + unsafe { + self.append_elements(iterator.as_slice() as _); + } + iterator.forget_remaining_elements(); + } +} + +impl<T, A: Allocator> TrySpecExtend<T, IntoIter<T>> for Vec<T, A> { + fn try_spec_extend(&mut self, mut iterator: IntoIter<T>) -> Result<(), TryReserveError> { + unsafe { + self.try_append_elements(iterator.as_slice() as _)?; + } + iterator.forget_remaining_elements(); + Ok(()) + } +} + +#[cfg(not(no_global_oom_handling))] +impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A> +where + I: Iterator<Item = &'a T>, + T: Clone, +{ + default fn spec_extend(&mut self, iterator: I) { + 
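+ // Fall back to the by-value path by cloning each referenced element; the
+ // `slice::Iter` specializations below avoid the per-element clones for
+ // `T: Copy` by appending the underlying slice directly.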
self.spec_extend(iterator.cloned()) + } +} + +impl<'a, T: 'a, I, A: Allocator + 'a> TrySpecExtend<&'a T, I> for Vec<T, A> +where + I: Iterator<Item = &'a T>, + T: Clone, +{ + default fn try_spec_extend(&mut self, iterator: I) -> Result<(), TryReserveError> { + self.try_spec_extend(iterator.cloned()) + } +} + +#[cfg(not(no_global_oom_handling))] +impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A> +where + T: Copy, +{ + fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { + let slice = iterator.as_slice(); + unsafe { self.append_elements(slice) }; + } +} + +impl<'a, T: 'a, A: Allocator + 'a> TrySpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A> +where + T: Copy, +{ + fn try_spec_extend(&mut self, iterator: slice::Iter<'a, T>) -> Result<(), TryReserveError> { + let slice = iterator.as_slice(); + unsafe { self.try_append_elements(slice) } + } +}