Diffstat (limited to 'rust/alloc')
-rw-r--r--  rust/alloc/README.md               |   36
-rw-r--r--  rust/alloc/alloc.rs                |  452
-rw-r--r--  rust/alloc/boxed.rs                | 2463
-rw-r--r--  rust/alloc/collections/mod.rs      |  160
-rw-r--r--  rust/alloc/lib.rs                  |  288
-rw-r--r--  rust/alloc/raw_vec.rs              |  611
-rw-r--r--  rust/alloc/slice.rs                |  890
-rw-r--r--  rust/alloc/vec/drain.rs            |  255
-rw-r--r--  rust/alloc/vec/extract_if.rs       |  115
-rw-r--r--  rust/alloc/vec/into_iter.rs        |  454
-rw-r--r--  rust/alloc/vec/is_zero.rs          |  204
-rw-r--r--  rust/alloc/vec/mod.rs              | 3683
-rw-r--r--  rust/alloc/vec/partial_eq.rs       |   49
-rw-r--r--  rust/alloc/vec/set_len_on_drop.rs  |   35
-rw-r--r--  rust/alloc/vec/spec_extend.rs      |  119
15 files changed, 0 insertions, 9814 deletions
diff --git a/rust/alloc/README.md b/rust/alloc/README.md
deleted file mode 100644
index eb6f22e94e..0000000000
--- a/rust/alloc/README.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# `alloc`
-
-These source files come from the Rust standard library, hosted in
-the <https://github.com/rust-lang/rust> repository, licensed under
-"Apache-2.0 OR MIT" and adapted for kernel use. For copyright details,
-see <https://github.com/rust-lang/rust/blob/master/COPYRIGHT>.
-
-Please note that these files should be kept as close as possible to
-upstream. In general, only additions should be performed (e.g. new
-methods). Eventually, changes should make it into upstream so that,
-at some point, this fork can be dropped from the kernel tree.
-
-The Rust upstream version on top of which these files are based matches
-the output of `scripts/min-tool-version.sh rustc`.
-
-
-## Rationale
-
-On one hand, kernel folks wanted to keep `alloc` in-tree to have more
-freedom in both workflow and actual features if actually needed
-(e.g. receiver types if we ended up using them), which is reasonable.
-
-On the other hand, Rust folks wanted to keep `alloc` as close to
-upstream as possible and avoid as much divergence as possible, which
-is also reasonable.
-
-We agreed on a middle-ground: we would keep a subset of `alloc`
-in-tree that would be as small and as close as possible to upstream.
-Then, upstream can start adding the functions that we add to `alloc`
-etc., until we reach a point where the kernel already knows exactly
-what it needs in `alloc` and all the new methods are merged into
-upstream, so that we can drop `alloc` from the kernel tree and go back
-to using the upstream one.
-
-By doing this, the kernel can go a bit faster now, and Rust can
-slowly incorporate and discuss the changes as needed.
diff --git a/rust/alloc/alloc.rs b/rust/alloc/alloc.rs
deleted file mode 100644
index abb791cc23..0000000000
--- a/rust/alloc/alloc.rs
+++ /dev/null
@@ -1,452 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-//! Memory allocation APIs
-
-#![stable(feature = "alloc_module", since = "1.28.0")]
-
-#[cfg(not(test))]
-use core::intrinsics;
-
-#[cfg(not(test))]
-use core::ptr::{self, NonNull};
-
-#[stable(feature = "alloc_module", since = "1.28.0")]
-#[doc(inline)]
-pub use core::alloc::*;
-
-#[cfg(test)]
-mod tests;
-
-extern "Rust" {
- // These are the magic symbols to call the global allocator. rustc generates
- // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
- // (the code expanding that attribute macro generates those functions), or to call
- // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
- // otherwise.
- // The rustc fork of LLVM 14 and earlier also special-cases these function names to be able to optimize them
- // like `malloc`, `realloc`, and `free`, respectively.
- #[rustc_allocator]
- #[rustc_nounwind]
- fn __rust_alloc(size: usize, align: usize) -> *mut u8;
- #[rustc_deallocator]
- #[rustc_nounwind]
- fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
- #[rustc_reallocator]
- #[rustc_nounwind]
- fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
- #[rustc_allocator_zeroed]
- #[rustc_nounwind]
- fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
-
- static __rust_no_alloc_shim_is_unstable: u8;
-}
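For context on the comment above: the `__rust_alloc` family resolves to whichever `#[global_allocator]` the final binary registers. A minimal userspace sketch of such a registration, using only the stable `GlobalAlloc` trait (the `ForwardingAlloc` name is hypothetical and not part of this file):

```rust
// Illustrative only: registering a global allocator is what makes the
// generated `__rust_alloc`, `__rust_dealloc`, etc. dispatch to this impl
// (via the `__rg_*` shims mentioned in the comment above).
use std::alloc::{GlobalAlloc, Layout, System};

struct ForwardingAlloc;

unsafe impl GlobalAlloc for ForwardingAlloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Forward to the platform allocator.
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static GLOBAL: ForwardingAlloc = ForwardingAlloc;

fn main() {
    // This heap allocation is routed through `ForwardingAlloc`.
    let v = vec![1u32, 2, 3];
    assert_eq!(v.len(), 3);
}
```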
-
-/// The global memory allocator.
-///
-/// This type implements the [`Allocator`] trait by forwarding calls
-/// to the allocator registered with the `#[global_allocator]` attribute
-/// if there is one, or the `std` crate’s default.
-///
-/// Note: while this type is unstable, the functionality it provides can be
-/// accessed through the [free functions in `alloc`](self#functions).
-#[unstable(feature = "allocator_api", issue = "32838")]
-#[derive(Copy, Clone, Default, Debug)]
-#[cfg(not(test))]
-pub struct Global;
-
-#[cfg(test)]
-pub use std::alloc::Global;
-
-/// Allocate memory with the global allocator.
-///
-/// This function forwards calls to the [`GlobalAlloc::alloc`] method
-/// of the allocator registered with the `#[global_allocator]` attribute
-/// if there is one, or the `std` crate’s default.
-///
-/// This function is expected to be deprecated in favor of the `alloc` method
-/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
-///
-/// # Safety
-///
-/// See [`GlobalAlloc::alloc`].
-///
-/// # Examples
-///
-/// ```
-/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
-///
-/// unsafe {
-/// let layout = Layout::new::<u16>();
-/// let ptr = alloc(layout);
-/// if ptr.is_null() {
-/// handle_alloc_error(layout);
-/// }
-///
-/// *(ptr as *mut u16) = 42;
-/// assert_eq!(*(ptr as *mut u16), 42);
-///
-/// dealloc(ptr, layout);
-/// }
-/// ```
-#[stable(feature = "global_alloc", since = "1.28.0")]
-#[must_use = "losing the pointer will leak memory"]
-#[inline]
-pub unsafe fn alloc(layout: Layout) -> *mut u8 {
- unsafe {
- // Make sure we don't accidentally allow omitting the allocator shim in
- // stable code until it is actually stabilized.
- core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);
-
- __rust_alloc(layout.size(), layout.align())
- }
-}
-
-/// Deallocate memory with the global allocator.
-///
-/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
-/// of the allocator registered with the `#[global_allocator]` attribute
-/// if there is one, or the `std` crate’s default.
-///
-/// This function is expected to be deprecated in favor of the `dealloc` method
-/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
-///
-/// # Safety
-///
-/// See [`GlobalAlloc::dealloc`].
-#[stable(feature = "global_alloc", since = "1.28.0")]
-#[inline]
-pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
- unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
-}
-
-/// Reallocate memory with the global allocator.
-///
-/// This function forwards calls to the [`GlobalAlloc::realloc`] method
-/// of the allocator registered with the `#[global_allocator]` attribute
-/// if there is one, or the `std` crate’s default.
-///
-/// This function is expected to be deprecated in favor of the `realloc` method
-/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
-///
-/// # Safety
-///
-/// See [`GlobalAlloc::realloc`].
-#[stable(feature = "global_alloc", since = "1.28.0")]
-#[must_use = "losing the pointer will leak memory"]
-#[inline]
-pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
- unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
-}
-
-/// Allocate zero-initialized memory with the global allocator.
-///
-/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
-/// of the allocator registered with the `#[global_allocator]` attribute
-/// if there is one, or the `std` crate’s default.
-///
-/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
-/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
-///
-/// # Safety
-///
-/// See [`GlobalAlloc::alloc_zeroed`].
-///
-/// # Examples
-///
-/// ```
-/// use std::alloc::{alloc_zeroed, dealloc, Layout};
-///
-/// unsafe {
-/// let layout = Layout::new::<u16>();
-/// let ptr = alloc_zeroed(layout);
-///
-/// assert_eq!(*(ptr as *mut u16), 0);
-///
-/// dealloc(ptr, layout);
-/// }
-/// ```
-#[stable(feature = "global_alloc", since = "1.28.0")]
-#[must_use = "losing the pointer will leak memory"]
-#[inline]
-pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
- unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
-}
-
-#[cfg(not(test))]
-impl Global {
- #[inline]
- fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
- match layout.size() {
- 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
- // SAFETY: `layout` is non-zero in size,
- size => unsafe {
- let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
- let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
- Ok(NonNull::slice_from_raw_parts(ptr, size))
- },
- }
- }
-
- // SAFETY: Same as `Allocator::grow`
- #[inline]
- unsafe fn grow_impl(
- &self,
- ptr: NonNull<u8>,
- old_layout: Layout,
- new_layout: Layout,
- zeroed: bool,
- ) -> Result<NonNull<[u8]>, AllocError> {
- debug_assert!(
- new_layout.size() >= old_layout.size(),
- "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
- );
-
- match old_layout.size() {
- 0 => self.alloc_impl(new_layout, zeroed),
-
- // SAFETY: `new_size` is non-zero, since `old_size` is non-zero in this arm and the
- // safety conditions require `new_size >= old_size`. Other conditions must be upheld by the caller
- old_size if old_layout.align() == new_layout.align() => unsafe {
- let new_size = new_layout.size();
-
- // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
- intrinsics::assume(new_size >= old_layout.size());
-
- let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
- let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
- if zeroed {
- raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
- }
- Ok(NonNull::slice_from_raw_parts(ptr, new_size))
- },
-
- // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
- // both the old and new memory allocation are valid for reads and writes for `old_size`
- // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
- // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
- // for `dealloc` must be upheld by the caller.
- old_size => unsafe {
- let new_ptr = self.alloc_impl(new_layout, zeroed)?;
- ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
- self.deallocate(ptr, old_layout);
- Ok(new_ptr)
- },
- }
- }
-}
-
-#[unstable(feature = "allocator_api", issue = "32838")]
-#[cfg(not(test))]
-unsafe impl Allocator for Global {
- #[inline]
- fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
- self.alloc_impl(layout, false)
- }
-
- #[inline]
- fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
- self.alloc_impl(layout, true)
- }
-
- #[inline]
- unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
- if layout.size() != 0 {
- // SAFETY: `layout` is non-zero in size,
- // other conditions must be upheld by the caller
- unsafe { dealloc(ptr.as_ptr(), layout) }
- }
- }
-
- #[inline]
- unsafe fn grow(
- &self,
- ptr: NonNull<u8>,
- old_layout: Layout,
- new_layout: Layout,
- ) -> Result<NonNull<[u8]>, AllocError> {
- // SAFETY: all conditions must be upheld by the caller
- unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
- }
-
- #[inline]
- unsafe fn grow_zeroed(
- &self,
- ptr: NonNull<u8>,
- old_layout: Layout,
- new_layout: Layout,
- ) -> Result<NonNull<[u8]>, AllocError> {
- // SAFETY: all conditions must be upheld by the caller
- unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
- }
-
- #[inline]
- unsafe fn shrink(
- &self,
- ptr: NonNull<u8>,
- old_layout: Layout,
- new_layout: Layout,
- ) -> Result<NonNull<[u8]>, AllocError> {
- debug_assert!(
- new_layout.size() <= old_layout.size(),
- "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
- );
-
- match new_layout.size() {
- // SAFETY: conditions must be upheld by the caller
- 0 => unsafe {
- self.deallocate(ptr, old_layout);
- Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
- },
-
- // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
- new_size if old_layout.align() == new_layout.align() => unsafe {
- // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
- intrinsics::assume(new_size <= old_layout.size());
-
- let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
- let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
- Ok(NonNull::slice_from_raw_parts(ptr, new_size))
- },
-
- // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
- // both the old and new memory allocation are valid for reads and writes for `new_size`
- // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
- // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
- // for `dealloc` must be upheld by the caller.
- new_size => unsafe {
- let new_ptr = self.allocate(new_layout)?;
- ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
- self.deallocate(ptr, old_layout);
- Ok(new_ptr)
- },
- }
- }
-}
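The `Allocator` impl above is what `Box`, `RawVec`, and `Vec` ultimately call into. A minimal sketch of the same calls made by hand on nightly (unstable `allocator_api`; illustrative only, not part of this file):

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let layout = Layout::array::<u64>(4).unwrap();

    // `allocate` returns a `NonNull<[u8]>` spanning at least `layout.size()` bytes.
    let block = Global.allocate(layout).expect("allocation failed");

    // Write through the block to show it is usable memory.
    unsafe { block.cast::<u8>().as_ptr().write(0xAA) };

    // SAFETY: `block` was allocated by `Global` with this exact layout and
    // has not been deallocated yet.
    unsafe { Global.deallocate(block.cast(), layout) };
}
```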
-
-/// The allocator for unique pointers.
-#[cfg(all(not(no_global_oom_handling), not(test)))]
-#[lang = "exchange_malloc"]
-#[inline]
-unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
- let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
- match Global.allocate(layout) {
- Ok(ptr) => ptr.as_mut_ptr(),
- Err(_) => handle_alloc_error(layout),
- }
-}
-
-// # Allocation error handler
-
-#[cfg(not(no_global_oom_handling))]
-extern "Rust" {
- // This is the magic symbol to call the global alloc error handler. rustc generates
- // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
- // default implementations below (`__rdl_oom`) otherwise.
- fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
-}
-
-/// Signal a memory allocation error.
-///
-/// Callers of memory allocation APIs wishing to cease execution
-/// in response to an allocation error are encouraged to call this function,
-/// rather than directly invoking [`panic!`] or similar.
-///
-/// This function is guaranteed to diverge (not return normally with a value), but depending on
-/// global configuration, it may either panic (resulting in unwinding or aborting as per
-/// configuration for all panics), or abort the process (with no unwinding).
-///
-/// The default behavior is:
-///
-/// * If the binary links against `std` (typically the case), then
-/// print a message to standard error and abort the process.
-/// This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
-/// Future versions of Rust may panic by default instead.
-///
-/// * If the binary does not link against `std` (all of its crates are marked
-/// [`#![no_std]`][no_std]), then call [`panic!`] with a message.
-/// [The panic handler] applies as to any panic.
-///
-/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
-/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
-/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
-/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
-#[stable(feature = "global_alloc", since = "1.28.0")]
-#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
-#[cfg(all(not(no_global_oom_handling), not(test)))]
-#[cold]
-pub const fn handle_alloc_error(layout: Layout) -> ! {
- const fn ct_error(_: Layout) -> ! {
- panic!("allocation failed");
- }
-
- #[inline]
- fn rt_error(layout: Layout) -> ! {
- unsafe {
- __rust_alloc_error_handler(layout.size(), layout.align());
- }
- }
-
- #[cfg(not(feature = "panic_immediate_abort"))]
- unsafe {
- core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
- }
-
- #[cfg(feature = "panic_immediate_abort")]
- ct_error(layout)
-}
-
-// For alloc test `std::alloc::handle_alloc_error` can be used directly.
-#[cfg(all(not(no_global_oom_handling), test))]
-pub use std::alloc::handle_alloc_error;
-
-#[cfg(all(not(no_global_oom_handling), not(test)))]
-#[doc(hidden)]
-#[allow(unused_attributes)]
-#[unstable(feature = "alloc_internals", issue = "none")]
-pub mod __alloc_error_handler {
- // called via generated `__rust_alloc_error_handler` if there is no
- // `#[alloc_error_handler]`.
- #[rustc_std_internal_symbol]
- pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
- extern "Rust" {
- // This symbol is emitted by rustc next to __rust_alloc_error_handler.
- // Its value depends on the -Zoom={panic,abort} compiler option.
- static __rust_alloc_error_handler_should_panic: u8;
- }
-
- if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
- panic!("memory allocation of {size} bytes failed")
- } else {
- core::panicking::panic_nounwind_fmt(
- format_args!("memory allocation of {size} bytes failed"),
- /* force_no_backtrace */ false,
- )
- }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-/// Specialize clones into pre-allocated, uninitialized memory.
-/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
-pub(crate) trait WriteCloneIntoRaw: Sized {
- unsafe fn write_clone_into_raw(&self, target: *mut Self);
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone> WriteCloneIntoRaw for T {
- #[inline]
- default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
- // Having allocated *first* may allow the optimizer to create
- // the cloned value in-place, skipping the local and move.
- unsafe { target.write(self.clone()) };
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy> WriteCloneIntoRaw for T {
- #[inline]
- unsafe fn write_clone_into_raw(&self, target: *mut Self) {
- // We can always copy in-place, without ever involving a local value.
- unsafe { target.copy_from_nonoverlapping(self, 1) };
- }
-}
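`WriteCloneIntoRaw` above lets `Box::clone` (see `boxed.rs` below) and `Rc`/`Arc::make_mut` clone directly into memory they have just allocated, with a plain copy for `T: Copy`. The same idea expressed against the public `MaybeUninit` API (the `clone_into_uninit` helper is hypothetical, for illustration only):

```rust
use std::mem::MaybeUninit;

// Hypothetical helper: clone `src` straight into an uninitialized slot,
// so the clone can be constructed in place rather than moved afterwards.
fn clone_into_uninit<T: Clone>(src: &T, dst: &mut MaybeUninit<T>) {
    dst.write(src.clone());
}

fn main() {
    let src = String::from("hello");
    let mut slot = MaybeUninit::<String>::uninit();
    clone_into_uninit(&src, &mut slot);

    // SAFETY: `slot` was fully initialized by `clone_into_uninit` above.
    let dst = unsafe { slot.assume_init() };
    assert_eq!(dst, "hello");
}
```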
diff --git a/rust/alloc/boxed.rs b/rust/alloc/boxed.rs
deleted file mode 100644
index c93a22a5c9..0000000000
--- a/rust/alloc/boxed.rs
+++ /dev/null
@@ -1,2463 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-//! The `Box<T>` type for heap allocation.
-//!
-//! [`Box<T>`], casually referred to as a 'box', provides the simplest form of
-//! heap allocation in Rust. Boxes provide ownership for this allocation, and
-//! drop their contents when they go out of scope. Boxes also ensure that they
-//! never allocate more than `isize::MAX` bytes.
-//!
-//! # Examples
-//!
-//! Move a value from the stack to the heap by creating a [`Box`]:
-//!
-//! ```
-//! let val: u8 = 5;
-//! let boxed: Box<u8> = Box::new(val);
-//! ```
-//!
-//! Move a value from a [`Box`] back to the stack by [dereferencing]:
-//!
-//! ```
-//! let boxed: Box<u8> = Box::new(5);
-//! let val: u8 = *boxed;
-//! ```
-//!
-//! Creating a recursive data structure:
-//!
-//! ```
-//! #[derive(Debug)]
-//! enum List<T> {
-//! Cons(T, Box<List<T>>),
-//! Nil,
-//! }
-//!
-//! let list: List<i32> = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil))));
-//! println!("{list:?}");
-//! ```
-//!
-//! This will print `Cons(1, Cons(2, Nil))`.
-//!
-//! Recursive structures must be boxed, because if the definition of `Cons`
-//! looked like this:
-//!
-//! ```compile_fail,E0072
-//! # enum List<T> {
-//! Cons(T, List<T>),
-//! # }
-//! ```
-//!
-//! It wouldn't work. This is because the size of a `List` depends on how many
-//! elements are in the list, and so we don't know how much memory to allocate
-//! for a `Cons`. By introducing a [`Box<T>`], which has a defined size, we know how
-//! big `Cons` needs to be.
-//!
-//! # Memory layout
-//!
-//! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for
-//! its allocation. It is valid to convert both ways between a [`Box`] and a
-//! raw pointer allocated with the [`Global`] allocator, given that the
-//! [`Layout`] used with the allocator is correct for the type. More precisely,
-//! a `value: *mut T` that has been allocated with the [`Global`] allocator
-//! with `Layout::for_value(&*value)` may be converted into a box using
-//! [`Box::<T>::from_raw(value)`]. Conversely, the memory backing a `value: *mut
-//! T` obtained from [`Box::<T>::into_raw`] may be deallocated using the
-//! [`Global`] allocator with [`Layout::for_value(&*value)`].
-//!
-//! For zero-sized values, the `Box` pointer still has to be [valid] for reads
-//! and writes and sufficiently aligned. In particular, casting any aligned
-//! non-zero integer literal to a raw pointer produces a valid pointer, but a
-//! pointer pointing into previously allocated memory that since got freed is
-//! not valid. The recommended way to build a Box to a ZST if `Box::new` cannot
-//! be used is to use [`ptr::NonNull::dangling`].
-//!
-//! So long as `T: Sized`, a `Box<T>` is guaranteed to be represented
-//! as a single pointer and is also ABI-compatible with C pointers
-//! (i.e. the C type `T*`). This means that if you have extern "C"
-//! Rust functions that will be called from C, you can define those
-//! Rust functions using `Box<T>` types, and use `T*` as corresponding
-//! type on the C side. As an example, consider this C header which
-//! declares functions that create and destroy some kind of `Foo`
-//! value:
-//!
-//! ```c
-//! /* C header */
-//!
-//! /* Returns ownership to the caller */
-//! struct Foo* foo_new(void);
-//!
-//! /* Takes ownership from the caller; no-op when invoked with null */
-//! void foo_delete(struct Foo*);
-//! ```
-//!
-//! These two functions might be implemented in Rust as follows. Here, the
-//! `struct Foo*` type from C is translated to `Box<Foo>`, which captures
-//! the ownership constraints. Note also that the nullable argument to
-//! `foo_delete` is represented in Rust as `Option<Box<Foo>>`, since `Box<Foo>`
-//! cannot be null.
-//!
-//! ```
-//! #[repr(C)]
-//! pub struct Foo;
-//!
-//! #[no_mangle]
-//! pub extern "C" fn foo_new() -> Box<Foo> {
-//! Box::new(Foo)
-//! }
-//!
-//! #[no_mangle]
-//! pub extern "C" fn foo_delete(_: Option<Box<Foo>>) {}
-//! ```
-//!
-//! Even though `Box<T>` has the same representation and C ABI as a C pointer,
-//! this does not mean that you can convert an arbitrary `T*` into a `Box<T>`
-//! and expect things to work. `Box<T>` values will always be fully aligned,
-//! non-null pointers. Moreover, the destructor for `Box<T>` will attempt to
-//! free the value with the global allocator. In general, the best practice
-//! is to only use `Box<T>` for pointers that originated from the global
-//! allocator.
-//!
-//! **Important.** At least at present, you should avoid using
-//! `Box<T>` types for functions that are defined in C but invoked
-//! from Rust. In those cases, you should directly mirror the C types
-//! as closely as possible. Using types like `Box<T>` where the C
-//! definition is just using `T*` can lead to undefined behavior, as
-//! described in [rust-lang/unsafe-code-guidelines#198][ucg#198].
-//!
-//! # Considerations for unsafe code
-//!
-//! **Warning: This section is not normative and is subject to change, possibly
-//! being relaxed in the future! It is a simplified summary of the rules
-//! currently implemented in the compiler.**
-//!
-//! The aliasing rules for `Box<T>` are the same as for `&mut T`. `Box<T>`
-//! asserts uniqueness over its content. Using raw pointers derived from a box
-//! after that box has been mutated through, moved or borrowed as `&mut T`
-//! is not allowed. For more guidance on working with box from unsafe code, see
-//! [rust-lang/unsafe-code-guidelines#326][ucg#326].
-//!
-//!
-//! [ucg#198]: https://github.com/rust-lang/unsafe-code-guidelines/issues/198
-//! [ucg#326]: https://github.com/rust-lang/unsafe-code-guidelines/issues/326
-//! [dereferencing]: core::ops::Deref
-//! [`Box::<T>::from_raw(value)`]: Box::from_raw
-//! [`Global`]: crate::alloc::Global
-//! [`Layout`]: crate::alloc::Layout
-//! [`Layout::for_value(&*value)`]: crate::alloc::Layout::for_value
-//! [valid]: ptr#safety
-
-#![stable(feature = "rust1", since = "1.0.0")]
-
-use core::any::Any;
-use core::async_iter::AsyncIterator;
-use core::borrow;
-use core::cmp::Ordering;
-use core::error::Error;
-use core::fmt;
-use core::future::Future;
-use core::hash::{Hash, Hasher};
-use core::iter::FusedIterator;
-use core::marker::Tuple;
-use core::marker::Unsize;
-use core::mem::{self, SizedTypeProperties};
-use core::ops::{
- CoerceUnsized, Coroutine, CoroutineState, Deref, DerefMut, DispatchFromDyn, Receiver,
-};
-use core::pin::Pin;
-use core::ptr::{self, NonNull, Unique};
-use core::task::{Context, Poll};
-
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw};
-use crate::alloc::{AllocError, Allocator, Global, Layout};
-#[cfg(not(no_global_oom_handling))]
-use crate::borrow::Cow;
-use crate::raw_vec::RawVec;
-#[cfg(not(no_global_oom_handling))]
-use crate::str::from_boxed_utf8_unchecked;
-#[cfg(not(no_global_oom_handling))]
-use crate::string::String;
-#[cfg(not(no_global_oom_handling))]
-use crate::vec::Vec;
-
-#[cfg(not(no_thin))]
-#[unstable(feature = "thin_box", issue = "92791")]
-pub use thin::ThinBox;
-
-#[cfg(not(no_thin))]
-mod thin;
-
-/// A pointer type that uniquely owns a heap allocation of type `T`.
-///
-/// See the [module-level documentation](../../std/boxed/index.html) for more.
-#[lang = "owned_box"]
-#[fundamental]
-#[stable(feature = "rust1", since = "1.0.0")]
-// The declaration of the `Box` struct must be kept in sync with the
-// `alloc::alloc::box_free` function or ICEs will happen. See the comment
-// on `box_free` for more details.
-pub struct Box<
- T: ?Sized,
- #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
->(Unique<T>, A);
-
-impl<T> Box<T> {
- /// Allocates memory on the heap and then places `x` into it.
- ///
- /// This doesn't actually allocate if `T` is zero-sized.
- ///
- /// # Examples
- ///
- /// ```
- /// let five = Box::new(5);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline(always)]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[must_use]
- #[rustc_diagnostic_item = "box_new"]
- pub fn new(x: T) -> Self {
- #[rustc_box]
- Box::new(x)
- }
-
- /// Constructs a new box with uninitialized contents.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let mut five = Box::<u32>::new_uninit();
- ///
- /// let five = unsafe {
- /// // Deferred initialization:
- /// five.as_mut_ptr().write(5);
- ///
- /// five.assume_init()
- /// };
- ///
- /// assert_eq!(*five, 5)
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- #[inline]
- pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
- Self::new_uninit_in(Global)
- }
-
- /// Constructs a new `Box` with uninitialized contents, with the memory
- /// being filled with `0` bytes.
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let zero = Box::<u32>::new_zeroed();
- /// let zero = unsafe { zero.assume_init() };
- ///
- /// assert_eq!(*zero, 0)
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
- Self::new_zeroed_in(Global)
- }
-
- /// Constructs a new `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
- /// `x` will be pinned in memory and unable to be moved.
- ///
- /// Constructing and pinning of the `Box` can also be done in two steps: `Box::pin(x)`
- /// does the same as <code>[Box::into_pin]\([Box::new]\(x))</code>. Consider using
- /// [`into_pin`](Box::into_pin) if you already have a `Box<T>`, or if you want to
- /// construct a (pinned) `Box` in a different way than with [`Box::new`].
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "pin", since = "1.33.0")]
- #[must_use]
- #[inline(always)]
- pub fn pin(x: T) -> Pin<Box<T>> {
- Box::new(x).into()
- }
-
- /// Allocates memory on the heap then places `x` into it,
- /// returning an error if the allocation fails
- ///
- /// This doesn't actually allocate if `T` is zero-sized.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// let five = Box::try_new(5)?;
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn try_new(x: T) -> Result<Self, AllocError> {
- Self::try_new_in(x, Global)
- }
-
- /// Constructs a new box with uninitialized contents on the heap,
- /// returning an error if the allocation fails
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// let mut five = Box::<u32>::try_new_uninit()?;
- ///
- /// let five = unsafe {
- /// // Deferred initialization:
- /// five.as_mut_ptr().write(5);
- ///
- /// five.assume_init()
- /// };
- ///
- /// assert_eq!(*five, 5);
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- #[inline]
- pub fn try_new_uninit() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
- Box::try_new_uninit_in(Global)
- }
-
- /// Constructs a new `Box` with uninitialized contents, with the memory
- /// being filled with `0` bytes on the heap
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// let zero = Box::<u32>::try_new_zeroed()?;
- /// let zero = unsafe { zero.assume_init() };
- ///
- /// assert_eq!(*zero, 0);
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- #[inline]
- pub fn try_new_zeroed() -> Result<Box<mem::MaybeUninit<T>>, AllocError> {
- Box::try_new_zeroed_in(Global)
- }
-}
-
-impl<T, A: Allocator> Box<T, A> {
- /// Allocates memory in the given allocator then places `x` into it.
- ///
- /// This doesn't actually allocate if `T` is zero-sized.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let five = Box::new_in(5, System);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[must_use]
- #[inline]
- pub fn new_in(x: T, alloc: A) -> Self
- where
- A: Allocator,
- {
- let mut boxed = Self::new_uninit_in(alloc);
- unsafe {
- boxed.as_mut_ptr().write(x);
- boxed.assume_init()
- }
- }
-
- /// Allocates memory in the given allocator then places `x` into it,
- /// returning an error if the allocation fails
- ///
- /// This doesn't actually allocate if `T` is zero-sized.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let five = Box::try_new_in(5, System)?;
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError>
- where
- A: Allocator,
- {
- let mut boxed = Self::try_new_uninit_in(alloc)?;
- unsafe {
- boxed.as_mut_ptr().write(x);
- Ok(boxed.assume_init())
- }
- }
-
- /// Constructs a new box with uninitialized contents in the provided allocator.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// use std::alloc::System;
- ///
- /// let mut five = Box::<u32, _>::new_uninit_in(System);
- ///
- /// let five = unsafe {
- /// // Deferred initialization:
- /// five.as_mut_ptr().write(5);
- ///
- /// five.assume_init()
- /// };
- ///
- /// assert_eq!(*five, 5)
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[cfg(not(no_global_oom_handling))]
- #[must_use]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A>
- where
- A: Allocator,
- {
- let layout = Layout::new::<mem::MaybeUninit<T>>();
- // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
- // That would make code size bigger.
- match Box::try_new_uninit_in(alloc) {
- Ok(m) => m,
- Err(_) => handle_alloc_error(layout),
- }
- }
-
- /// Constructs a new box with uninitialized contents in the provided allocator,
- /// returning an error if the allocation fails
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// use std::alloc::System;
- ///
- /// let mut five = Box::<u32, _>::try_new_uninit_in(System)?;
- ///
- /// let five = unsafe {
- /// // Deferred initialization:
- /// five.as_mut_ptr().write(5);
- ///
- /// five.assume_init()
- /// };
- ///
- /// assert_eq!(*five, 5);
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
- where
- A: Allocator,
- {
- let ptr = if T::IS_ZST {
- NonNull::dangling()
- } else {
- let layout = Layout::new::<mem::MaybeUninit<T>>();
- alloc.allocate(layout)?.cast()
- };
- unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
- }
-
- /// Constructs a new `Box` with uninitialized contents, with the memory
- /// being filled with `0` bytes in the provided allocator.
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// use std::alloc::System;
- ///
- /// let zero = Box::<u32, _>::new_zeroed_in(System);
- /// let zero = unsafe { zero.assume_init() };
- ///
- /// assert_eq!(*zero, 0)
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[cfg(not(no_global_oom_handling))]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A>
- where
- A: Allocator,
- {
- let layout = Layout::new::<mem::MaybeUninit<T>>();
- // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
- // That would make code size bigger.
- match Box::try_new_zeroed_in(alloc) {
- Ok(m) => m,
- Err(_) => handle_alloc_error(layout),
- }
- }
-
- /// Constructs a new `Box` with uninitialized contents, with the memory
- /// being filled with `0` bytes in the provided allocator,
- /// returning an error if the allocation fails,
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// use std::alloc::System;
- ///
- /// let zero = Box::<u32, _>::try_new_zeroed_in(System)?;
- /// let zero = unsafe { zero.assume_init() };
- ///
- /// assert_eq!(*zero, 0);
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
- where
- A: Allocator,
- {
- let ptr = if T::IS_ZST {
- NonNull::dangling()
- } else {
- let layout = Layout::new::<mem::MaybeUninit<T>>();
- alloc.allocate_zeroed(layout)?.cast()
- };
- unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
- }
-
- /// Constructs a new `Pin<Box<T, A>>`. If `T` does not implement [`Unpin`], then
- /// `x` will be pinned in memory and unable to be moved.
- ///
- /// Constructing and pinning of the `Box` can also be done in two steps: `Box::pin_in(x, alloc)`
- /// does the same as <code>[Box::into_pin]\([Box::new_in]\(x, alloc))</code>. Consider using
- /// [`into_pin`](Box::into_pin) if you already have a `Box<T, A>`, or if you want to
- /// construct a (pinned) `Box` in a different way than with [`Box::new_in`].
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[must_use]
- #[inline(always)]
- pub fn pin_in(x: T, alloc: A) -> Pin<Self>
- where
- A: 'static + Allocator,
- {
- Self::into_pin(Self::new_in(x, alloc))
- }
-
- /// Converts a `Box<T>` into a `Box<[T]>`
- ///
- /// This conversion does not allocate on the heap and happens in place.
- #[unstable(feature = "box_into_boxed_slice", issue = "71582")]
- pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
- let (raw, alloc) = Box::into_raw_with_allocator(boxed);
- unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) }
- }
-
- /// Consumes the `Box`, returning the wrapped value.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(box_into_inner)]
- ///
- /// let c = Box::new(5);
- ///
- /// assert_eq!(Box::into_inner(c), 5);
- /// ```
- #[unstable(feature = "box_into_inner", issue = "80437")]
- #[inline]
- pub fn into_inner(boxed: Self) -> T {
- *boxed
- }
-}
-
-impl<T> Box<[T]> {
- /// Constructs a new boxed slice with uninitialized contents.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let mut values = Box::<[u32]>::new_uninit_slice(3);
- ///
- /// let values = unsafe {
- /// // Deferred initialization:
- /// values[0].as_mut_ptr().write(1);
- /// values[1].as_mut_ptr().write(2);
- /// values[2].as_mut_ptr().write(3);
- ///
- /// values.assume_init()
- /// };
- ///
- /// assert_eq!(*values, [1, 2, 3])
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
- unsafe { RawVec::with_capacity(len).into_box(len) }
- }
-
- /// Constructs a new boxed slice with uninitialized contents, with the memory
- /// being filled with `0` bytes.
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let values = Box::<[u32]>::new_zeroed_slice(3);
- /// let values = unsafe { values.assume_init() };
- ///
- /// assert_eq!(*values, [0, 0, 0])
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
- unsafe { RawVec::with_capacity_zeroed(len).into_box(len) }
- }
-
- /// Constructs a new boxed slice with uninitialized contents. Returns an error if
- /// the allocation fails
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// let mut values = Box::<[u32]>::try_new_uninit_slice(3)?;
- /// let values = unsafe {
- /// // Deferred initialization:
- /// values[0].as_mut_ptr().write(1);
- /// values[1].as_mut_ptr().write(2);
- /// values[2].as_mut_ptr().write(3);
- /// values.assume_init()
- /// };
- ///
- /// assert_eq!(*values, [1, 2, 3]);
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
- let ptr = if T::IS_ZST || len == 0 {
- NonNull::dangling()
- } else {
- let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
- Ok(l) => l,
- Err(_) => return Err(AllocError),
- };
- Global.allocate(layout)?.cast()
- };
- unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
- }
-
- /// Constructs a new boxed slice with uninitialized contents, with the memory
- /// being filled with `0` bytes. Returns an error if the allocation fails
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// let values = Box::<[u32]>::try_new_zeroed_slice(3)?;
- /// let values = unsafe { values.assume_init() };
- ///
- /// assert_eq!(*values, [0, 0, 0]);
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn try_new_zeroed_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
- let ptr = if T::IS_ZST || len == 0 {
- NonNull::dangling()
- } else {
- let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
- Ok(l) => l,
- Err(_) => return Err(AllocError),
- };
- Global.allocate_zeroed(layout)?.cast()
- };
- unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
- }
-}
-
-impl<T, A: Allocator> Box<[T], A> {
- /// Constructs a new boxed slice with uninitialized contents in the provided allocator.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// use std::alloc::System;
- ///
- /// let mut values = Box::<[u32], _>::new_uninit_slice_in(3, System);
- ///
- /// let values = unsafe {
- /// // Deferred initialization:
- /// values[0].as_mut_ptr().write(1);
- /// values[1].as_mut_ptr().write(2);
- /// values[2].as_mut_ptr().write(3);
- ///
- /// values.assume_init()
- /// };
- ///
- /// assert_eq!(*values, [1, 2, 3])
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> {
- unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) }
- }
-
- /// Constructs a new boxed slice with uninitialized contents in the provided allocator,
- /// with the memory being filled with `0` bytes.
- ///
- /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
- /// of this method.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, new_uninit)]
- ///
- /// use std::alloc::System;
- ///
- /// let values = Box::<[u32], _>::new_zeroed_slice_in(3, System);
- /// let values = unsafe { values.assume_init() };
- ///
- /// assert_eq!(*values, [0, 0, 0])
- /// ```
- ///
- /// [zeroed]: mem::MaybeUninit::zeroed
- #[cfg(not(no_global_oom_handling))]
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "new_uninit", issue = "63291")]
- #[must_use]
- pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> {
- unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) }
- }
-}
-
-impl<T, A: Allocator> Box<mem::MaybeUninit<T>, A> {
- /// Converts to `Box<T, A>`.
- ///
- /// # Safety
- ///
- /// As with [`MaybeUninit::assume_init`],
- /// it is up to the caller to guarantee that the value
- /// really is in an initialized state.
- /// Calling this when the content is not yet fully initialized
- /// causes immediate undefined behavior.
- ///
- /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let mut five = Box::<u32>::new_uninit();
- ///
- /// let five: Box<u32> = unsafe {
- /// // Deferred initialization:
- /// five.as_mut_ptr().write(5);
- ///
- /// five.assume_init()
- /// };
- ///
- /// assert_eq!(*five, 5)
- /// ```
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[inline]
- pub unsafe fn assume_init(self) -> Box<T, A> {
- let (raw, alloc) = Box::into_raw_with_allocator(self);
- unsafe { Box::from_raw_in(raw as *mut T, alloc) }
- }
-
- /// Writes the value and converts to `Box<T, A>`.
- ///
- /// This method converts the box similarly to [`Box::assume_init`] but
- /// writes `value` into it before conversion thus guaranteeing safety.
- /// In some scenarios use of this method may improve performance because
- /// the compiler may be able to optimize copying from stack.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let big_box = Box::<[usize; 1024]>::new_uninit();
- ///
- /// let mut array = [0; 1024];
- /// for (i, place) in array.iter_mut().enumerate() {
- /// *place = i;
- /// }
- ///
- /// // The optimizer may be able to elide this copy, so previous code writes
- /// // to heap directly.
- /// let big_box = Box::write(big_box, array);
- ///
- /// for (i, x) in big_box.iter().enumerate() {
- /// assert_eq!(*x, i);
- /// }
- /// ```
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[inline]
- pub fn write(mut boxed: Self, value: T) -> Box<T, A> {
- unsafe {
- (*boxed).write(value);
- boxed.assume_init()
- }
- }
-}
-
-impl<T, A: Allocator> Box<[mem::MaybeUninit<T>], A> {
- /// Converts to `Box<[T], A>`.
- ///
- /// # Safety
- ///
- /// As with [`MaybeUninit::assume_init`],
- /// it is up to the caller to guarantee that the values
- /// really are in an initialized state.
- /// Calling this when the content is not yet fully initialized
- /// causes immediate undefined behavior.
- ///
- /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(new_uninit)]
- ///
- /// let mut values = Box::<[u32]>::new_uninit_slice(3);
- ///
- /// let values = unsafe {
- /// // Deferred initialization:
- /// values[0].as_mut_ptr().write(1);
- /// values[1].as_mut_ptr().write(2);
- /// values[2].as_mut_ptr().write(3);
- ///
- /// values.assume_init()
- /// };
- ///
- /// assert_eq!(*values, [1, 2, 3])
- /// ```
- #[unstable(feature = "new_uninit", issue = "63291")]
- #[inline]
- pub unsafe fn assume_init(self) -> Box<[T], A> {
- let (raw, alloc) = Box::into_raw_with_allocator(self);
- unsafe { Box::from_raw_in(raw as *mut [T], alloc) }
- }
-}
-
-impl<T: ?Sized> Box<T> {
- /// Constructs a box from a raw pointer.
- ///
- /// After calling this function, the raw pointer is owned by the
- /// resulting `Box`. Specifically, the `Box` destructor will call
- /// the destructor of `T` and free the allocated memory. For this
- /// to be safe, the memory must have been allocated in accordance
- /// with the [memory layout] used by `Box` .
- ///
- /// # Safety
- ///
- /// This function is unsafe because improper use may lead to
- /// memory problems. For example, a double-free may occur if the
- /// function is called twice on the same raw pointer.
- ///
- /// The safety conditions are described in the [memory layout] section.
- ///
- /// # Examples
- ///
- /// Recreate a `Box` which was previously converted to a raw pointer
- /// using [`Box::into_raw`]:
- /// ```
- /// let x = Box::new(5);
- /// let ptr = Box::into_raw(x);
- /// let x = unsafe { Box::from_raw(ptr) };
- /// ```
- /// Manually create a `Box` from scratch by using the global allocator:
- /// ```
- /// use std::alloc::{alloc, Layout};
- ///
- /// unsafe {
- /// let ptr = alloc(Layout::new::<i32>()) as *mut i32;
- /// // In general .write is required to avoid attempting to destruct
- /// // the (uninitialized) previous contents of `ptr`, though for this
- /// // simple example `*ptr = 5` would have worked as well.
- /// ptr.write(5);
- /// let x = Box::from_raw(ptr);
- /// }
- /// ```
- ///
- /// [memory layout]: self#memory-layout
- /// [`Layout`]: crate::Layout
- #[stable(feature = "box_raw", since = "1.4.0")]
- #[inline]
- #[must_use = "call `drop(Box::from_raw(ptr))` if you intend to drop the `Box`"]
- pub unsafe fn from_raw(raw: *mut T) -> Self {
- unsafe { Self::from_raw_in(raw, Global) }
- }
-}
-
-impl<T: ?Sized, A: Allocator> Box<T, A> {
- /// Constructs a box from a raw pointer in the given allocator.
- ///
- /// After calling this function, the raw pointer is owned by the
- /// resulting `Box`. Specifically, the `Box` destructor will call
- /// the destructor of `T` and free the allocated memory. For this
- /// to be safe, the memory must have been allocated in accordance
- /// with the [memory layout] used by `Box` .
- ///
- /// # Safety
- ///
- /// This function is unsafe because improper use may lead to
- /// memory problems. For example, a double-free may occur if the
- /// function is called twice on the same raw pointer.
- ///
- ///
- /// # Examples
- ///
- /// Recreate a `Box` which was previously converted to a raw pointer
- /// using [`Box::into_raw_with_allocator`]:
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let x = Box::new_in(5, System);
- /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
- /// let x = unsafe { Box::from_raw_in(ptr, alloc) };
- /// ```
- /// Manually create a `Box` from scratch by using the system allocator:
- /// ```
- /// #![feature(allocator_api, slice_ptr_get)]
- ///
- /// use std::alloc::{Allocator, Layout, System};
- ///
- /// unsafe {
- /// let ptr = System.allocate(Layout::new::<i32>())?.as_mut_ptr() as *mut i32;
- /// // In general .write is required to avoid attempting to destruct
- /// // the (uninitialized) previous contents of `ptr`, though for this
- /// // simple example `*ptr = 5` would have worked as well.
- /// ptr.write(5);
- /// let x = Box::from_raw_in(ptr, System);
- /// }
- /// # Ok::<(), std::alloc::AllocError>(())
- /// ```
- ///
- /// [memory layout]: self#memory-layout
- /// [`Layout`]: crate::Layout
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[rustc_const_unstable(feature = "const_box", issue = "92521")]
- #[inline]
- pub const unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
- Box(unsafe { Unique::new_unchecked(raw) }, alloc)
- }
-
- /// Consumes the `Box`, returning a wrapped raw pointer.
- ///
- /// The pointer will be properly aligned and non-null.
- ///
- /// After calling this function, the caller is responsible for the
- /// memory previously managed by the `Box`. In particular, the
- /// caller should properly destroy `T` and release the memory, taking
- /// into account the [memory layout] used by `Box`. The easiest way to
- /// do this is to convert the raw pointer back into a `Box` with the
- /// [`Box::from_raw`] function, allowing the `Box` destructor to perform
- /// the cleanup.
- ///
- /// Note: this is an associated function, which means that you have
- /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
- /// is so that there is no conflict with a method on the inner type.
- ///
- /// # Examples
- /// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
- /// for automatic cleanup:
- /// ```
- /// let x = Box::new(String::from("Hello"));
- /// let ptr = Box::into_raw(x);
- /// let x = unsafe { Box::from_raw(ptr) };
- /// ```
- /// Manual cleanup by explicitly running the destructor and deallocating
- /// the memory:
- /// ```
- /// use std::alloc::{dealloc, Layout};
- /// use std::ptr;
- ///
- /// let x = Box::new(String::from("Hello"));
- /// let ptr = Box::into_raw(x);
- /// unsafe {
- /// ptr::drop_in_place(ptr);
- /// dealloc(ptr as *mut u8, Layout::new::<String>());
- /// }
- /// ```
- /// Note: This is equivalent to the following:
- /// ```
- /// let x = Box::new(String::from("Hello"));
- /// let ptr = Box::into_raw(x);
- /// unsafe {
- /// drop(Box::from_raw(ptr));
- /// }
- /// ```
- ///
- /// [memory layout]: self#memory-layout
- #[stable(feature = "box_raw", since = "1.4.0")]
- #[inline]
- pub fn into_raw(b: Self) -> *mut T {
- Self::into_raw_with_allocator(b).0
- }
-
- /// Consumes the `Box`, returning a wrapped raw pointer and the allocator.
- ///
- /// The pointer will be properly aligned and non-null.
- ///
- /// After calling this function, the caller is responsible for the
- /// memory previously managed by the `Box`. In particular, the
- /// caller should properly destroy `T` and release the memory, taking
- /// into account the [memory layout] used by `Box`. The easiest way to
- /// do this is to convert the raw pointer back into a `Box` with the
- /// [`Box::from_raw_in`] function, allowing the `Box` destructor to perform
- /// the cleanup.
- ///
- /// Note: this is an associated function, which means that you have
- /// to call it as `Box::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. This
- /// is so that there is no conflict with a method on the inner type.
- ///
- /// # Examples
- /// Converting the raw pointer back into a `Box` with [`Box::from_raw_in`]
- /// for automatic cleanup:
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let x = Box::new_in(String::from("Hello"), System);
- /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
- /// let x = unsafe { Box::from_raw_in(ptr, alloc) };
- /// ```
- /// Manual cleanup by explicitly running the destructor and deallocating
- /// the memory:
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::{Allocator, Layout, System};
- /// use std::ptr::{self, NonNull};
- ///
- /// let x = Box::new_in(String::from("Hello"), System);
- /// let (ptr, alloc) = Box::into_raw_with_allocator(x);
- /// unsafe {
- /// ptr::drop_in_place(ptr);
- /// let non_null = NonNull::new_unchecked(ptr);
- /// alloc.deallocate(non_null.cast(), Layout::new::<String>());
- /// }
- /// ```
- ///
- /// [memory layout]: self#memory-layout
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
- let (leaked, alloc) = Box::into_unique(b);
- (leaked.as_ptr(), alloc)
- }
-
- #[unstable(
- feature = "ptr_internals",
- issue = "none",
- reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead"
- )]
- #[inline]
- #[doc(hidden)]
- pub fn into_unique(b: Self) -> (Unique<T>, A) {
- // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
- // raw pointer for the type system. Turning it directly into a raw pointer would not be
- // recognized as "releasing" the unique pointer to permit aliased raw accesses,
- // so all raw pointer methods have to go through `Box::leak`. Turning *that* to a raw pointer
- // behaves correctly.
- let alloc = unsafe { ptr::read(&b.1) };
- (Unique::from(Box::leak(b)), alloc)
- }
-
- /// Returns a reference to the underlying allocator.
- ///
- /// Note: this is an associated function, which means that you have
- /// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This
- /// is so that there is no conflict with a method on the inner type.
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[rustc_const_unstable(feature = "const_box", issue = "92521")]
- #[inline]
- pub const fn allocator(b: &Self) -> &A {
- &b.1
- }
-
- /// Consumes and leaks the `Box`, returning a mutable reference,
- /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime
- /// `'a`. If the type has only static references, or none at all, then this
- /// may be chosen to be `'static`.
- ///
- /// This function is mainly useful for data that lives for the remainder of
- /// the program's life. Dropping the returned reference will cause a memory
- /// leak. If this is not acceptable, the reference should first be wrapped
- /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can
- /// then be dropped which will properly destroy `T` and release the
- /// allocated memory.
- ///
- /// Note: this is an associated function, which means that you have
- /// to call it as `Box::leak(b)` instead of `b.leak()`. This
- /// is so that there is no conflict with a method on the inner type.
- ///
- /// # Examples
- ///
- /// Simple usage:
- ///
- /// ```
- /// let x = Box::new(41);
- /// let static_ref: &'static mut usize = Box::leak(x);
- /// *static_ref += 1;
- /// assert_eq!(*static_ref, 42);
- /// ```
- ///
- /// Unsized data:
- ///
- /// ```
- /// let x = vec![1, 2, 3].into_boxed_slice();
- /// let static_ref = Box::leak(x);
- /// static_ref[0] = 4;
- /// assert_eq!(*static_ref, [4, 2, 3]);
- /// ```
- #[stable(feature = "box_leak", since = "1.26.0")]
- #[inline]
- pub fn leak<'a>(b: Self) -> &'a mut T
- where
- A: 'a,
- {
- unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
- }
-
- /// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
- /// `*boxed` will be pinned in memory and unable to be moved.
- ///
- /// This conversion does not allocate on the heap and happens in place.
- ///
- /// This is also available via [`From`].
- ///
- /// Constructing and pinning a `Box` with <code>Box::into_pin([Box::new]\(x))</code>
- /// can also be written more concisely using <code>[Box::pin]\(x)</code>.
- /// This `into_pin` method is useful if you already have a `Box<T>`, or you are
- /// constructing a (pinned) `Box` in a different way than with [`Box::new`].
- ///
- /// # Notes
- ///
- /// It's not recommended that crates add an impl like `From<Box<T>> for Pin<T>`,
- /// as it'll introduce an ambiguity when calling `Pin::from`.
- /// A demonstration of such a poor impl is shown below.
- ///
- /// ```compile_fail
- /// # use std::pin::Pin;
- /// struct Foo; // A type defined in this crate.
- /// impl From<Box<()>> for Pin<Foo> {
- /// fn from(_: Box<()>) -> Pin<Foo> {
- /// Pin::new(Foo)
- /// }
- /// }
- ///
- /// let foo = Box::new(());
- /// let bar = Pin::from(foo);
- /// ```
- #[stable(feature = "box_into_pin", since = "1.63.0")]
- #[rustc_const_unstable(feature = "const_box", issue = "92521")]
- pub const fn into_pin(boxed: Self) -> Pin<Self>
- where
- A: 'static,
- {
- // It's not possible to move or replace the insides of a `Pin<Box<T>>`
- // when `T: !Unpin`, so it's safe to pin it directly without any
- // additional requirements.
- unsafe { Pin::new_unchecked(boxed) }
- }
-}
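-
-// A short sketch of `Box::into_pin` itself (illustrative, not upstream text):
-// the conversion reuses the existing allocation.
-//
-//     use core::pin::Pin;
-//
-//     let b: Box<u32> = Box::new(5);
-//     let pinned: Pin<Box<u32>> = Box::into_pin(b); // no new allocation
-//     assert_eq!(*pinned, 5);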
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
- #[inline]
- fn drop(&mut self) {
- // the T in the Box is dropped by the compiler before the destructor is run
-
- let ptr = self.0;
-
- unsafe {
- let layout = Layout::for_value_raw(ptr.as_ptr());
- if layout.size() != 0 {
- self.1.deallocate(From::from(ptr.cast()), layout);
- }
- }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Default> Default for Box<T> {
- /// Creates a `Box<T>` with the `Default` value for `T`.
- #[inline]
- fn default() -> Self {
- Box::new(T::default())
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Default for Box<[T]> {
- #[inline]
- fn default() -> Self {
- let ptr: Unique<[T]> = Unique::<[T; 0]>::dangling();
- Box(ptr, Global)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "default_box_extra", since = "1.17.0")]
-impl Default for Box<str> {
- #[inline]
- fn default() -> Self {
- // SAFETY: This is the same as `Unique::cast<U>` but with an unsized `U = str`.
- let ptr: Unique<str> = unsafe {
- let bytes: Unique<[u8]> = Unique::<[u8; 0]>::dangling();
- Unique::new_unchecked(bytes.as_ptr() as *mut str)
- };
- Box(ptr, Global)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
- /// Returns a new box with a `clone()` of this box's contents.
- ///
- /// # Examples
- ///
- /// ```
- /// let x = Box::new(5);
- /// let y = x.clone();
- ///
- /// // The value is the same
- /// assert_eq!(x, y);
- ///
- /// // But they are unique objects
- /// assert_ne!(&*x as *const i32, &*y as *const i32);
- /// ```
- #[inline]
- fn clone(&self) -> Self {
- // Pre-allocate memory to allow writing the cloned value directly.
- let mut boxed = Self::new_uninit_in(self.1.clone());
- unsafe {
- (**self).write_clone_into_raw(boxed.as_mut_ptr());
- boxed.assume_init()
- }
- }
-
- /// Copies `source`'s contents into `self` without creating a new allocation.
- ///
- /// # Examples
- ///
- /// ```
- /// let x = Box::new(5);
- /// let mut y = Box::new(10);
- /// let yp: *const i32 = &*y;
- ///
- /// y.clone_from(&x);
- ///
- /// // The value is the same
- /// assert_eq!(x, y);
- ///
- /// // And no allocation occurred
- /// assert_eq!(yp, &*y);
- /// ```
- #[inline]
- fn clone_from(&mut self, source: &Self) {
- (**self).clone_from(&(**source));
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_slice_clone", since = "1.3.0")]
-impl Clone for Box<str> {
- fn clone(&self) -> Self {
- // this makes a copy of the data
- let buf: Box<[u8]> = self.as_bytes().into();
- unsafe { from_boxed_utf8_unchecked(buf) }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Box<T, A> {
- #[inline]
- fn eq(&self, other: &Self) -> bool {
- PartialEq::eq(&**self, &**other)
- }
- #[inline]
- fn ne(&self, other: &Self) -> bool {
- PartialEq::ne(&**self, &**other)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Box<T, A> {
- #[inline]
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- PartialOrd::partial_cmp(&**self, &**other)
- }
- #[inline]
- fn lt(&self, other: &Self) -> bool {
- PartialOrd::lt(&**self, &**other)
- }
- #[inline]
- fn le(&self, other: &Self) -> bool {
- PartialOrd::le(&**self, &**other)
- }
- #[inline]
- fn ge(&self, other: &Self) -> bool {
- PartialOrd::ge(&**self, &**other)
- }
- #[inline]
- fn gt(&self, other: &Self) -> bool {
- PartialOrd::gt(&**self, &**other)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Ord, A: Allocator> Ord for Box<T, A> {
- #[inline]
- fn cmp(&self, other: &Self) -> Ordering {
- Ord::cmp(&**self, &**other)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq, A: Allocator> Eq for Box<T, A> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Hash, A: Allocator> Hash for Box<T, A> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- (**self).hash(state);
- }
-}
-
-#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
-impl<T: ?Sized + Hasher, A: Allocator> Hasher for Box<T, A> {
- fn finish(&self) -> u64 {
- (**self).finish()
- }
- fn write(&mut self, bytes: &[u8]) {
- (**self).write(bytes)
- }
- fn write_u8(&mut self, i: u8) {
- (**self).write_u8(i)
- }
- fn write_u16(&mut self, i: u16) {
- (**self).write_u16(i)
- }
- fn write_u32(&mut self, i: u32) {
- (**self).write_u32(i)
- }
- fn write_u64(&mut self, i: u64) {
- (**self).write_u64(i)
- }
- fn write_u128(&mut self, i: u128) {
- (**self).write_u128(i)
- }
- fn write_usize(&mut self, i: usize) {
- (**self).write_usize(i)
- }
- fn write_i8(&mut self, i: i8) {
- (**self).write_i8(i)
- }
- fn write_i16(&mut self, i: i16) {
- (**self).write_i16(i)
- }
- fn write_i32(&mut self, i: i32) {
- (**self).write_i32(i)
- }
- fn write_i64(&mut self, i: i64) {
- (**self).write_i64(i)
- }
- fn write_i128(&mut self, i: i128) {
- (**self).write_i128(i)
- }
- fn write_isize(&mut self, i: isize) {
- (**self).write_isize(i)
- }
- fn write_length_prefix(&mut self, len: usize) {
- (**self).write_length_prefix(len)
- }
- fn write_str(&mut self, s: &str) {
- (**self).write_str(s)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "from_for_ptrs", since = "1.6.0")]
-impl<T> From<T> for Box<T> {
- /// Converts a `T` into a `Box<T>`
- ///
- /// The conversion allocates on the heap and moves `t`
- /// from the stack into it.
- ///
- /// # Examples
- ///
- /// ```rust
- /// let x = 5;
- /// let boxed = Box::new(5);
- ///
- /// assert_eq!(Box::from(x), boxed);
- /// ```
- fn from(t: T) -> Self {
- Box::new(t)
- }
-}
-
-#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Pin<Box<T, A>>
-where
- A: 'static,
-{
- /// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
- /// `*boxed` will be pinned in memory and unable to be moved.
- ///
- /// This conversion does not allocate on the heap and happens in place.
- ///
- /// This is also available via [`Box::into_pin`].
- ///
- /// Constructing and pinning a `Box` with <code><Pin<Box\<T>>>::from([Box::new]\(x))</code>
- /// can also be written more concisely using <code>[Box::pin]\(x)</code>.
- /// This `From` implementation is useful if you already have a `Box<T>`, or you are
- /// constructing a (pinned) `Box` in a different way than with [`Box::new`].
- fn from(boxed: Box<T, A>) -> Self {
- Box::into_pin(boxed)
- }
-}
-
-/// Specialization trait used for `From<&[T]>`.
-#[cfg(not(no_global_oom_handling))]
-trait BoxFromSlice<T> {
- fn from_slice(slice: &[T]) -> Self;
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone> BoxFromSlice<T> for Box<[T]> {
- #[inline]
- default fn from_slice(slice: &[T]) -> Self {
- slice.to_vec().into_boxed_slice()
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy> BoxFromSlice<T> for Box<[T]> {
- #[inline]
- fn from_slice(slice: &[T]) -> Self {
- let len = slice.len();
- let buf = RawVec::with_capacity(len);
- unsafe {
- ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
- buf.into_box(slice.len()).assume_init()
- }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_from_slice", since = "1.17.0")]
-impl<T: Clone> From<&[T]> for Box<[T]> {
- /// Converts a `&[T]` into a `Box<[T]>`
- ///
- /// This conversion allocates on the heap
- /// and copies the contents of `slice`.
- ///
- /// # Examples
- /// ```rust
- /// // create a &[u8] which will be used to create a Box<[u8]>
- /// let slice: &[u8] = &[104, 101, 108, 108, 111];
- /// let boxed_slice: Box<[u8]> = Box::from(slice);
- ///
- /// println!("{boxed_slice:?}");
- /// ```
- #[inline]
- fn from(slice: &[T]) -> Box<[T]> {
- <Self as BoxFromSlice<T>>::from_slice(slice)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_from_cow", since = "1.45.0")]
-impl<T: Clone> From<Cow<'_, [T]>> for Box<[T]> {
- /// Converts a `Cow<'_, [T]>` into a `Box<[T]>`
- ///
- /// When `cow` is the `Cow::Borrowed` variant, this
- /// conversion allocates on the heap and copies the
- /// underlying slice. Otherwise, it will try to reuse the owned
- /// `Vec`'s allocation.
- #[inline]
- fn from(cow: Cow<'_, [T]>) -> Box<[T]> {
- match cow {
- Cow::Borrowed(slice) => Box::from(slice),
- Cow::Owned(slice) => Box::from(slice),
- }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_from_slice", since = "1.17.0")]
-impl From<&str> for Box<str> {
- /// Converts a `&str` into a `Box<str>`
- ///
- /// This conversion allocates on the heap
- /// and performs a copy of `s`.
- ///
- /// # Examples
- ///
- /// ```rust
- /// let boxed: Box<str> = Box::from("hello");
- /// println!("{boxed}");
- /// ```
- #[inline]
- fn from(s: &str) -> Box<str> {
- unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_from_cow", since = "1.45.0")]
-impl From<Cow<'_, str>> for Box<str> {
- /// Converts a `Cow<'_, str>` into a `Box<str>`
- ///
- /// When `cow` is the `Cow::Borrowed` variant, this
- /// conversion allocates on the heap and copies the
- /// underlying `str`. Otherwise, it will try to reuse the owned
- /// `String`'s allocation.
- ///
- /// # Examples
- ///
- /// ```rust
- /// use std::borrow::Cow;
- ///
- /// let unboxed = Cow::Borrowed("hello");
- /// let boxed: Box<str> = Box::from(unboxed);
- /// println!("{boxed}");
- /// ```
- ///
- /// ```rust
- /// # use std::borrow::Cow;
- /// let unboxed = Cow::Owned("hello".to_string());
- /// let boxed: Box<str> = Box::from(unboxed);
- /// println!("{boxed}");
- /// ```
- #[inline]
- fn from(cow: Cow<'_, str>) -> Box<str> {
- match cow {
- Cow::Borrowed(s) => Box::from(s),
- Cow::Owned(s) => Box::from(s),
- }
- }
-}
-
-#[stable(feature = "boxed_str_conv", since = "1.19.0")]
-impl<A: Allocator> From<Box<str, A>> for Box<[u8], A> {
- /// Converts a `Box<str>` into a `Box<[u8]>`
- ///
- /// This conversion does not allocate on the heap and happens in place.
- ///
- /// # Examples
- /// ```rust
- /// // create a Box<str> which will be used to create a Box<[u8]>
- /// let boxed: Box<str> = Box::from("hello");
- /// let boxed_str: Box<[u8]> = Box::from(boxed);
- ///
- /// // create a &[u8] which will be used to create a Box<[u8]>
- /// let slice: &[u8] = &[104, 101, 108, 108, 111];
- /// let boxed_slice = Box::from(slice);
- ///
- /// assert_eq!(boxed_slice, boxed_str);
- /// ```
- #[inline]
- fn from(s: Box<str, A>) -> Self {
- let (raw, alloc) = Box::into_raw_with_allocator(s);
- unsafe { Box::from_raw_in(raw as *mut [u8], alloc) }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_from_array", since = "1.45.0")]
-impl<T, const N: usize> From<[T; N]> for Box<[T]> {
- /// Converts a `[T; N]` into a `Box<[T]>`
- ///
- /// This conversion moves the array to newly heap-allocated memory.
- ///
- /// # Examples
- ///
- /// ```rust
- /// let boxed: Box<[u8]> = Box::from([4, 2]);
- /// println!("{boxed:?}");
- /// ```
- fn from(array: [T; N]) -> Box<[T]> {
- Box::new(array)
- }
-}
-
-/// Casts a boxed slice to a boxed array.
-///
-/// # Safety
-///
-/// `boxed_slice.len()` must be exactly `N`.
-unsafe fn boxed_slice_as_array_unchecked<T, A: Allocator, const N: usize>(
- boxed_slice: Box<[T], A>,
-) -> Box<[T; N], A> {
- debug_assert_eq!(boxed_slice.len(), N);
-
- let (ptr, alloc) = Box::into_raw_with_allocator(boxed_slice);
- // SAFETY: Pointer and allocator came from an existing box,
- // and our safety condition requires that the length is exactly `N`
- unsafe { Box::from_raw_in(ptr as *mut [T; N], alloc) }
-}
-
-#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
- type Error = Box<[T]>;
-
- /// Attempts to convert a `Box<[T]>` into a `Box<[T; N]>`.
- ///
- /// The conversion occurs in-place and does not require a
- /// new memory allocation.
- ///
- /// # Errors
- ///
- /// Returns the old `Box<[T]>` in the `Err` variant if
- /// `boxed_slice.len()` does not equal `N`.
- fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
- if boxed_slice.len() == N {
- Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) })
- } else {
- Err(boxed_slice)
- }
- }
-}
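-
-// An illustrative sketch (not upstream text) of the boxed-slice-to-boxed-array
-// conversion above:
-//
-//     let slice: Box<[i32]> = Box::from([1, 2, 3]);
-//     let array: Box<[i32; 3]> = slice.try_into().unwrap(); // in place, no realloc
-//     assert_eq!(*array, [1, 2, 3]);
-//
-//     let wrong_len: Box<[i32]> = Box::from([1, 2, 3]);
-//     assert!(<Box<[i32; 4]>>::try_from(wrong_len).is_err());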
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "boxed_array_try_from_vec", since = "1.66.0")]
-impl<T, const N: usize> TryFrom<Vec<T>> for Box<[T; N]> {
- type Error = Vec<T>;
-
- /// Attempts to convert a `Vec<T>` into a `Box<[T; N]>`.
- ///
- /// Like [`Vec::into_boxed_slice`], this is in-place if `vec.capacity() == N`,
- /// but will require a reallocation otherwise.
- ///
- /// # Errors
- ///
- /// Returns the original `Vec<T>` in the `Err` variant if
- /// `vec.len()` does not equal `N`.
- ///
- /// # Examples
- ///
- /// This can be used with [`vec!`] to create an array on the heap:
- ///
- /// ```
- /// let state: Box<[f32; 100]> = vec![1.0; 100].try_into().unwrap();
- /// assert_eq!(state.len(), 100);
- /// ```
- fn try_from(vec: Vec<T>) -> Result<Self, Self::Error> {
- if vec.len() == N {
- let boxed_slice = vec.into_boxed_slice();
- Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) })
- } else {
- Err(vec)
- }
- }
-}
-
-impl<A: Allocator> Box<dyn Any, A> {
- /// Attempt to downcast the box to a concrete type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::any::Any;
- ///
- /// fn print_if_string(value: Box<dyn Any>) {
- /// if let Ok(string) = value.downcast::<String>() {
- /// println!("String ({}): {}", string.len(), string);
- /// }
- /// }
- ///
- /// let my_string = "Hello World".to_string();
- /// print_if_string(Box::new(my_string));
- /// print_if_string(Box::new(0i8));
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
- if self.is::<T>() { unsafe { Ok(self.downcast_unchecked::<T>()) } } else { Err(self) }
- }
-
- /// Downcasts the box to a concrete type.
- ///
- /// For a safe alternative see [`downcast`].
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(downcast_unchecked)]
- ///
- /// use std::any::Any;
- ///
- /// let x: Box<dyn Any> = Box::new(1_usize);
- ///
- /// unsafe {
- /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
- /// }
- /// ```
- ///
- /// # Safety
- ///
- /// The contained value must be of type `T`. Calling this method
- /// with the incorrect type is *undefined behavior*.
- ///
- /// [`downcast`]: Self::downcast
- #[inline]
- #[unstable(feature = "downcast_unchecked", issue = "90850")]
- pub unsafe fn downcast_unchecked<T: Any>(self) -> Box<T, A> {
- debug_assert!(self.is::<T>());
- unsafe {
- let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_allocator(self);
- Box::from_raw_in(raw as *mut T, alloc)
- }
- }
-}
-
-impl<A: Allocator> Box<dyn Any + Send, A> {
- /// Attempt to downcast the box to a concrete type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::any::Any;
- ///
- /// fn print_if_string(value: Box<dyn Any + Send>) {
- /// if let Ok(string) = value.downcast::<String>() {
- /// println!("String ({}): {}", string.len(), string);
- /// }
- /// }
- ///
- /// let my_string = "Hello World".to_string();
- /// print_if_string(Box::new(my_string));
- /// print_if_string(Box::new(0i8));
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
- if self.is::<T>() { unsafe { Ok(self.downcast_unchecked::<T>()) } } else { Err(self) }
- }
-
- /// Downcasts the box to a concrete type.
- ///
- /// For a safe alternative see [`downcast`].
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(downcast_unchecked)]
- ///
- /// use std::any::Any;
- ///
- /// let x: Box<dyn Any + Send> = Box::new(1_usize);
- ///
- /// unsafe {
- /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
- /// }
- /// ```
- ///
- /// # Safety
- ///
- /// The contained value must be of type `T`. Calling this method
- /// with the incorrect type is *undefined behavior*.
- ///
- /// [`downcast`]: Self::downcast
- #[inline]
- #[unstable(feature = "downcast_unchecked", issue = "90850")]
- pub unsafe fn downcast_unchecked<T: Any>(self) -> Box<T, A> {
- debug_assert!(self.is::<T>());
- unsafe {
- let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_allocator(self);
- Box::from_raw_in(raw as *mut T, alloc)
- }
- }
-}
-
-impl<A: Allocator> Box<dyn Any + Send + Sync, A> {
- /// Attempt to downcast the box to a concrete type.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::any::Any;
- ///
- /// fn print_if_string(value: Box<dyn Any + Send + Sync>) {
- /// if let Ok(string) = value.downcast::<String>() {
- /// println!("String ({}): {}", string.len(), string);
- /// }
- /// }
- ///
- /// let my_string = "Hello World".to_string();
- /// print_if_string(Box::new(my_string));
- /// print_if_string(Box::new(0i8));
- /// ```
- #[inline]
- #[stable(feature = "box_send_sync_any_downcast", since = "1.51.0")]
- pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
- if self.is::<T>() { unsafe { Ok(self.downcast_unchecked::<T>()) } } else { Err(self) }
- }
-
- /// Downcasts the box to a concrete type.
- ///
- /// For a safe alternative see [`downcast`].
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(downcast_unchecked)]
- ///
- /// use std::any::Any;
- ///
- /// let x: Box<dyn Any + Send + Sync> = Box::new(1_usize);
- ///
- /// unsafe {
- /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
- /// }
- /// ```
- ///
- /// # Safety
- ///
- /// The contained value must be of type `T`. Calling this method
- /// with the incorrect type is *undefined behavior*.
- ///
- /// [`downcast`]: Self::downcast
- #[inline]
- #[unstable(feature = "downcast_unchecked", issue = "90850")]
- pub unsafe fn downcast_unchecked<T: Any>(self) -> Box<T, A> {
- debug_assert!(self.is::<T>());
- unsafe {
- let (raw, alloc): (*mut (dyn Any + Send + Sync), _) =
- Box::into_raw_with_allocator(self);
- Box::from_raw_in(raw as *mut T, alloc)
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Display + ?Sized, A: Allocator> fmt::Display for Box<T, A> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(&**self, f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug + ?Sized, A: Allocator> fmt::Debug for Box<T, A> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&**self, f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized, A: Allocator> fmt::Pointer for Box<T, A> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- // It's not possible to extract the inner Unique directly from the Box,
- // so instead we cast it to a *const, which aliases the Unique.
- let ptr: *const T = &**self;
- fmt::Pointer::fmt(&ptr, f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized, A: Allocator> Deref for Box<T, A> {
- type Target = T;
-
- fn deref(&self) -> &T {
- &**self
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized, A: Allocator> DerefMut for Box<T, A> {
- fn deref_mut(&mut self) -> &mut T {
- &mut **self
- }
-}
-
-#[unstable(feature = "receiver_trait", issue = "none")]
-impl<T: ?Sized, A: Allocator> Receiver for Box<T, A> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<I: Iterator + ?Sized, A: Allocator> Iterator for Box<I, A> {
- type Item = I::Item;
- fn next(&mut self) -> Option<I::Item> {
- (**self).next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- (**self).size_hint()
- }
- fn nth(&mut self, n: usize) -> Option<I::Item> {
- (**self).nth(n)
- }
- fn last(self) -> Option<I::Item> {
- BoxIter::last(self)
- }
-}
-
-trait BoxIter {
- type Item;
- fn last(self) -> Option<Self::Item>;
-}
-
-impl<I: Iterator + ?Sized, A: Allocator> BoxIter for Box<I, A> {
- type Item = I::Item;
- default fn last(self) -> Option<I::Item> {
- #[inline]
- fn some<T>(_: Option<T>, x: T) -> Option<T> {
- Some(x)
- }
-
- self.fold(None, some)
- }
-}
-
-/// Specialization for sized `I`s that uses `I`'s implementation of `last()`
-/// instead of the default.
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<I: Iterator, A: Allocator> BoxIter for Box<I, A> {
- fn last(self) -> Option<I::Item> {
- (*self).last()
- }
-}
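-
-// A small sketch (not upstream text) of how these impls behave for a boxed
-// trait-object iterator: `last()` takes the default `fold`-based path because
-// `dyn Iterator` is unsized, while a boxed *sized* iterator uses its own
-// `last()`.
-//
-//     let it: Box<dyn Iterator<Item = u32>> = Box::new(1..=3);
-//     assert_eq!(it.last(), Some(3));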
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<I: DoubleEndedIterator + ?Sized, A: Allocator> DoubleEndedIterator for Box<I, A> {
- fn next_back(&mut self) -> Option<I::Item> {
- (**self).next_back()
- }
- fn nth_back(&mut self, n: usize) -> Option<I::Item> {
- (**self).nth_back(n)
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<I: ExactSizeIterator + ?Sized, A: Allocator> ExactSizeIterator for Box<I, A> {
- fn len(&self) -> usize {
- (**self).len()
- }
- fn is_empty(&self) -> bool {
- (**self).is_empty()
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<I: FusedIterator + ?Sized, A: Allocator> FusedIterator for Box<I, A> {}
-
-#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
-impl<Args: Tuple, F: FnOnce<Args> + ?Sized, A: Allocator> FnOnce<Args> for Box<F, A> {
- type Output = <F as FnOnce<Args>>::Output;
-
- extern "rust-call" fn call_once(self, args: Args) -> Self::Output {
- <F as FnOnce<Args>>::call_once(*self, args)
- }
-}
-
-#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
-impl<Args: Tuple, F: FnMut<Args> + ?Sized, A: Allocator> FnMut<Args> for Box<F, A> {
- extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output {
- <F as FnMut<Args>>::call_mut(self, args)
- }
-}
-
-#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
-impl<Args: Tuple, F: Fn<Args> + ?Sized, A: Allocator> Fn<Args> for Box<F, A> {
- extern "rust-call" fn call(&self, args: Args) -> Self::Output {
- <F as Fn<Args>>::call(self, args)
- }
-}
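-
-// Illustrative examples (not upstream text) of calling closures through a Box,
-// which these forwarding impls make possible:
-//
-//     let add_one: Box<dyn Fn(i32) -> i32> = Box::new(|x| x + 1);
-//     assert_eq!(add_one(41), 42);
-//
-//     let consume: Box<dyn FnOnce() -> String> = Box::new(|| String::from("done"));
-//     assert_eq!(consume(), "done"); // calling a boxed FnOnce by value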
-
-#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
-
-#[unstable(feature = "dispatch_from_dyn", issue = "none")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T, Global> {}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "boxed_slice_from_iter", since = "1.32.0")]
-impl<I> FromIterator<I> for Box<[I]> {
- fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
- iter.into_iter().collect::<Vec<_>>().into_boxed_slice()
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "box_slice_clone", since = "1.3.0")]
-impl<T: Clone, A: Allocator + Clone> Clone for Box<[T], A> {
- fn clone(&self) -> Self {
- let alloc = Box::allocator(self).clone();
- self.to_vec_in(alloc).into_boxed_slice()
- }
-
- fn clone_from(&mut self, other: &Self) {
- if self.len() == other.len() {
- self.clone_from_slice(&other);
- } else {
- *self = other.clone();
- }
- }
-}
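-
-// A sketch (not upstream text) of the `clone_from` fast path above: when the
-// lengths match, the elements are cloned in place and no reallocation happens.
-//
-//     let x: Box<[u32]> = Box::from([1, 2, 3]);
-//     let mut y: Box<[u32]> = Box::from([4, 5, 6]);
-//     let yp = y.as_ptr();
-//
-//     y.clone_from(&x);
-//     assert_eq!(&*y, &[1, 2, 3]);
-//     assert_eq!(yp, y.as_ptr()); // same allocation was reused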
-
-#[stable(feature = "box_borrow", since = "1.1.0")]
-impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Box<T, A> {
- fn borrow(&self) -> &T {
- &**self
- }
-}
-
-#[stable(feature = "box_borrow", since = "1.1.0")]
-impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for Box<T, A> {
- fn borrow_mut(&mut self) -> &mut T {
- &mut **self
- }
-}
-
-#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized, A: Allocator> AsRef<T> for Box<T, A> {
- fn as_ref(&self) -> &T {
- &**self
- }
-}
-
-#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized, A: Allocator> AsMut<T> for Box<T, A> {
- fn as_mut(&mut self) -> &mut T {
- &mut **self
- }
-}
-
-/* Nota bene
- *
- * We could have chosen not to add this impl, and instead have written a
- * function of Pin<Box<T>> to Pin<T>. Such a function would not be sound,
- * because Box<T> implements Unpin even when T does not, as a result of
- * this impl.
- *
- * We chose this API instead of the alternative for a few reasons:
- * - Logically, it is helpful to understand pinning in regard to the
- * memory region being pointed to. For this reason none of the
- * standard library pointer types support projecting through a pin
- * (Box<T> is the only pointer type in std for which this would be
- * safe.)
- * - It is in practice very useful to have Box<T> be unconditionally
- * Unpin because of trait objects, for which the structural auto
- * trait functionality does not apply (e.g., Box<dyn Foo> would
- * otherwise not be Unpin).
- *
- * Another type with the same semantics as Box but only a conditional
- * implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and
- * could have a method to project a Pin<T> from it.
- */
-#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
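-
-// A small demonstration (illustrative, not upstream text) of the unconditional
-// `Unpin` impl above: the box is `Unpin` even when its contents are not.
-//
-//     use core::marker::PhantomPinned;
-//
-//     struct NotUnpin(PhantomPinned); // `!Unpin`
-//     fn assert_unpin<T: Unpin>(_: &T) {}
-//
-//     let b = Box::new(NotUnpin(PhantomPinned));
-//     assert_unpin(&b);     // compiles: `Box<NotUnpin>` is `Unpin`
-//     // assert_unpin(&*b); // would not compile: `NotUnpin` is `!Unpin`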
-
-#[unstable(feature = "coroutine_trait", issue = "43122")]
-impl<G: ?Sized + Coroutine<R> + Unpin, R, A: Allocator> Coroutine<R> for Box<G, A>
-where
- A: 'static,
-{
- type Yield = G::Yield;
- type Return = G::Return;
-
- fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState<Self::Yield, Self::Return> {
- G::resume(Pin::new(&mut *self), arg)
- }
-}
-
-#[unstable(feature = "coroutine_trait", issue = "43122")]
-impl<G: ?Sized + Coroutine<R>, R, A: Allocator> Coroutine<R> for Pin<Box<G, A>>
-where
- A: 'static,
-{
- type Yield = G::Yield;
- type Return = G::Return;
-
- fn resume(mut self: Pin<&mut Self>, arg: R) -> CoroutineState<Self::Yield, Self::Return> {
- G::resume((*self).as_mut(), arg)
- }
-}
-
-#[stable(feature = "futures_api", since = "1.36.0")]
-impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A>
-where
- A: 'static,
-{
- type Output = F::Output;
-
- fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
- F::poll(Pin::new(&mut *self), cx)
- }
-}
-
-#[unstable(feature = "async_iterator", issue = "79024")]
-impl<S: ?Sized + AsyncIterator + Unpin> AsyncIterator for Box<S> {
- type Item = S::Item;
-
- fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
- Pin::new(&mut **self).poll_next(cx)
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (**self).size_hint()
- }
-}
-
-impl dyn Error {
- #[inline]
- #[stable(feature = "error_downcast", since = "1.3.0")]
- #[rustc_allow_incoherent_impl]
- /// Attempts to downcast the box to a concrete type.
- pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error>> {
- if self.is::<T>() {
- unsafe {
- let raw: *mut dyn Error = Box::into_raw(self);
- Ok(Box::from_raw(raw as *mut T))
- }
- } else {
- Err(self)
- }
- }
-}
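-
-// An illustrative round trip (not upstream text) through the `downcast` above:
-//
-//     use std::error::Error;
-//     use std::fmt;
-//
-//     #[derive(Debug)]
-//     struct MyError;
-//     impl fmt::Display for MyError {
-//         fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-//             write!(f, "my error")
-//         }
-//     }
-//     impl Error for MyError {}
-//
-//     let boxed: Box<dyn Error> = Box::new(MyError);
-//     match boxed.downcast::<MyError>() {
-//         Ok(concrete) => assert_eq!(concrete.to_string(), "my error"),
-//         Err(original) => drop(original), // still the intact Box<dyn Error>
-//     }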
-
-impl dyn Error + Send {
- #[inline]
- #[stable(feature = "error_downcast", since = "1.3.0")]
- #[rustc_allow_incoherent_impl]
- /// Attempts to downcast the box to a concrete type.
- pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
- let err: Box<dyn Error> = self;
- <dyn Error>::downcast(err).map_err(|s| unsafe {
- // Reapply the `Send` marker.
- Box::from_raw(Box::into_raw(s) as *mut (dyn Error + Send))
- })
- }
-}
-
-impl dyn Error + Send + Sync {
- #[inline]
- #[stable(feature = "error_downcast", since = "1.3.0")]
- #[rustc_allow_incoherent_impl]
- /// Attempts to downcast the box to a concrete type.
- pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<Self>> {
- let err: Box<dyn Error> = self;
- <dyn Error>::downcast(err).map_err(|s| unsafe {
- // Reapply the `Send + Sync` marker.
- Box::from_raw(Box::into_raw(s) as *mut (dyn Error + Send + Sync))
- })
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, E: Error + 'a> From<E> for Box<dyn Error + 'a> {
- /// Converts a type of [`Error`] into a box of dyn [`Error`].
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::fmt;
- /// use std::mem;
- ///
- /// #[derive(Debug)]
- /// struct AnError;
- ///
- /// impl fmt::Display for AnError {
- /// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- /// write!(f, "An error")
- /// }
- /// }
- ///
- /// impl Error for AnError {}
- ///
- /// let an_error = AnError;
- /// assert!(0 == mem::size_of_val(&an_error));
- /// let a_boxed_error = Box::<dyn Error>::from(an_error);
- /// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- fn from(err: E) -> Box<dyn Error + 'a> {
- Box::new(err)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, E: Error + Send + Sync + 'a> From<E> for Box<dyn Error + Send + Sync + 'a> {
- /// Converts a type of [`Error`] + [`Send`] + [`Sync`] into a box of
- /// dyn [`Error`] + [`Send`] + [`Sync`].
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::fmt;
- /// use std::mem;
- ///
- /// #[derive(Debug)]
- /// struct AnError;
- ///
- /// impl fmt::Display for AnError {
- /// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- /// write!(f, "An error")
- /// }
- /// }
- ///
- /// impl Error for AnError {}
- ///
- /// unsafe impl Send for AnError {}
- ///
- /// unsafe impl Sync for AnError {}
- ///
- /// let an_error = AnError;
- /// assert!(0 == mem::size_of_val(&an_error));
- /// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(an_error);
- /// assert!(
- /// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- fn from(err: E) -> Box<dyn Error + Send + Sync + 'a> {
- Box::new(err)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl From<String> for Box<dyn Error + Send + Sync> {
- /// Converts a [`String`] into a box of dyn [`Error`] + [`Send`] + [`Sync`].
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::mem;
- ///
- /// let a_string_error = "a string error".to_string();
- /// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(a_string_error);
- /// assert!(
- /// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- #[inline]
- fn from(err: String) -> Box<dyn Error + Send + Sync> {
- struct StringError(String);
-
- impl Error for StringError {
- #[allow(deprecated)]
- fn description(&self) -> &str {
- &self.0
- }
- }
-
- impl fmt::Display for StringError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(&self.0, f)
- }
- }
-
- // Purposefully skip printing "StringError(..)"
- impl fmt::Debug for StringError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&self.0, f)
- }
- }
-
- Box::new(StringError(err))
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "string_box_error", since = "1.6.0")]
-impl From<String> for Box<dyn Error> {
- /// Converts a [`String`] into a box of dyn [`Error`].
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::mem;
- ///
- /// let a_string_error = "a string error".to_string();
- /// let a_boxed_error = Box::<dyn Error>::from(a_string_error);
- /// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- fn from(str_err: String) -> Box<dyn Error> {
- let err1: Box<dyn Error + Send + Sync> = From::from(str_err);
- let err2: Box<dyn Error> = err1;
- err2
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a> From<&str> for Box<dyn Error + Send + Sync + 'a> {
- /// Converts a [`str`] into a box of dyn [`Error`] + [`Send`] + [`Sync`].
- ///
- /// [`str`]: prim@str
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::mem;
- ///
- /// let a_str_error = "a str error";
- /// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(a_str_error);
- /// assert!(
- /// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- #[inline]
- fn from(err: &str) -> Box<dyn Error + Send + Sync + 'a> {
- From::from(String::from(err))
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "string_box_error", since = "1.6.0")]
-impl From<&str> for Box<dyn Error> {
- /// Converts a [`str`] into a box of dyn [`Error`].
- ///
- /// [`str`]: prim@str
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::mem;
- ///
- /// let a_str_error = "a str error";
- /// let a_boxed_error = Box::<dyn Error>::from(a_str_error);
- /// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- fn from(err: &str) -> Box<dyn Error> {
- From::from(String::from(err))
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "cow_box_error", since = "1.22.0")]
-impl<'a, 'b> From<Cow<'b, str>> for Box<dyn Error + Send + Sync + 'a> {
- /// Converts a [`Cow`] into a box of dyn [`Error`] + [`Send`] + [`Sync`].
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::mem;
- /// use std::borrow::Cow;
- ///
- /// let a_cow_str_error = Cow::from("a str error");
- /// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(a_cow_str_error);
- /// assert!(
- /// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- fn from(err: Cow<'b, str>) -> Box<dyn Error + Send + Sync + 'a> {
- From::from(String::from(err))
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "cow_box_error", since = "1.22.0")]
-impl<'a> From<Cow<'a, str>> for Box<dyn Error> {
- /// Converts a [`Cow`] into a box of dyn [`Error`].
- ///
- /// # Examples
- ///
- /// ```
- /// use std::error::Error;
- /// use std::mem;
- /// use std::borrow::Cow;
- ///
- /// let a_cow_str_error = Cow::from("a str error");
- /// let a_boxed_error = Box::<dyn Error>::from(a_cow_str_error);
- /// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
- /// ```
- fn from(err: Cow<'a, str>) -> Box<dyn Error> {
- From::from(String::from(err))
- }
-}
-
-#[stable(feature = "box_error", since = "1.8.0")]
-impl<T: core::error::Error> core::error::Error for Box<T> {
- #[allow(deprecated, deprecated_in_future)]
- fn description(&self) -> &str {
- core::error::Error::description(&**self)
- }
-
- #[allow(deprecated)]
- fn cause(&self) -> Option<&dyn core::error::Error> {
- core::error::Error::cause(&**self)
- }
-
- fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
- core::error::Error::source(&**self)
- }
-
- fn provide<'b>(&'b self, request: &mut core::error::Request<'b>) {
- core::error::Error::provide(&**self, request);
- }
-}
diff --git a/rust/alloc/collections/mod.rs b/rust/alloc/collections/mod.rs
deleted file mode 100644
index 00ffb3b973..0000000000
--- a/rust/alloc/collections/mod.rs
+++ /dev/null
@@ -1,160 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-//! Collection types.
-
-#![stable(feature = "rust1", since = "1.0.0")]
-
-#[cfg(not(no_global_oom_handling))]
-pub mod binary_heap;
-#[cfg(not(no_global_oom_handling))]
-mod btree;
-#[cfg(not(no_global_oom_handling))]
-pub mod linked_list;
-#[cfg(not(no_global_oom_handling))]
-pub mod vec_deque;
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-pub mod btree_map {
- //! An ordered map based on a B-Tree.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub use super::btree::map::*;
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-pub mod btree_set {
- //! An ordered set based on a B-Tree.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub use super::btree::set::*;
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[doc(no_inline)]
-pub use binary_heap::BinaryHeap;
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[doc(no_inline)]
-pub use btree_map::BTreeMap;
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[doc(no_inline)]
-pub use btree_set::BTreeSet;
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[doc(no_inline)]
-pub use linked_list::LinkedList;
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[doc(no_inline)]
-pub use vec_deque::VecDeque;
-
-use crate::alloc::{Layout, LayoutError};
-use core::fmt::Display;
-
-/// The error type for `try_reserve` methods.
-#[derive(Clone, PartialEq, Eq, Debug)]
-#[stable(feature = "try_reserve", since = "1.57.0")]
-pub struct TryReserveError {
- kind: TryReserveErrorKind,
-}
-
-impl TryReserveError {
- /// Details about the allocation that caused the error
- #[inline]
- #[must_use]
- #[unstable(
- feature = "try_reserve_kind",
- reason = "Uncertain how much info should be exposed",
- issue = "48043"
- )]
- pub fn kind(&self) -> TryReserveErrorKind {
- self.kind.clone()
- }
-}
-
-/// Details of the allocation that caused a `TryReserveError`
-#[derive(Clone, PartialEq, Eq, Debug)]
-#[unstable(
- feature = "try_reserve_kind",
- reason = "Uncertain how much info should be exposed",
- issue = "48043"
-)]
-pub enum TryReserveErrorKind {
- /// Error due to the computed capacity exceeding the collection's maximum
- /// (usually `isize::MAX` bytes).
- CapacityOverflow,
-
- /// The memory allocator returned an error
- AllocError {
- /// The layout of the allocation request that failed
- layout: Layout,
-
- #[doc(hidden)]
- #[unstable(
- feature = "container_error_extra",
- issue = "none",
- reason = "\
- Enable exposing the allocator’s custom error value \
- if an associated type is added in the future: \
- https://github.com/rust-lang/wg-allocators/issues/23"
- )]
- non_exhaustive: (),
- },
-}
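-
-// A sketch (not upstream text) of how callers typically reach this type, via
-// the stable `try_reserve` methods and the unstable `kind()` accessor above:
-//
-//     let mut v: Vec<u8> = Vec::new();
-//     if let Err(e) = v.try_reserve(usize::MAX) {
-//         match e.kind() {
-//             TryReserveErrorKind::CapacityOverflow => { /* asked for too much */ }
-//             TryReserveErrorKind::AllocError { layout, .. } => {
-//                 let _ = layout.size(); // the request the allocator rejected
-//             }
-//         }
-//     }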
-
-#[unstable(
- feature = "try_reserve_kind",
- reason = "Uncertain how much info should be exposed",
- issue = "48043"
-)]
-impl From<TryReserveErrorKind> for TryReserveError {
- #[inline]
- fn from(kind: TryReserveErrorKind) -> Self {
- Self { kind }
- }
-}
-
-#[unstable(feature = "try_reserve_kind", reason = "new API", issue = "48043")]
-impl From<LayoutError> for TryReserveErrorKind {
- /// Always evaluates to [`TryReserveErrorKind::CapacityOverflow`].
- #[inline]
- fn from(_: LayoutError) -> Self {
- TryReserveErrorKind::CapacityOverflow
- }
-}
-
-#[stable(feature = "try_reserve", since = "1.57.0")]
-impl Display for TryReserveError {
- fn fmt(
- &self,
- fmt: &mut core::fmt::Formatter<'_>,
- ) -> core::result::Result<(), core::fmt::Error> {
- fmt.write_str("memory allocation failed")?;
- let reason = match self.kind {
- TryReserveErrorKind::CapacityOverflow => {
- " because the computed capacity exceeded the collection's maximum"
- }
- TryReserveErrorKind::AllocError { .. } => {
- " because the memory allocator returned an error"
- }
- };
- fmt.write_str(reason)
- }
-}
-
-/// An intermediate trait for specialization of `Extend`.
-#[doc(hidden)]
-#[cfg(not(no_global_oom_handling))]
-trait SpecExtend<I: IntoIterator> {
- /// Extends `self` with the contents of the given iterator.
- fn spec_extend(&mut self, iter: I);
-}
-
-#[stable(feature = "try_reserve", since = "1.57.0")]
-impl core::error::Error for TryReserveError {}
diff --git a/rust/alloc/lib.rs b/rust/alloc/lib.rs
deleted file mode 100644
index 36f79c0755..0000000000
--- a/rust/alloc/lib.rs
+++ /dev/null
@@ -1,288 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-//! # The Rust core allocation and collections library
-//!
-//! This library provides smart pointers and collections for managing
-//! heap-allocated values.
-//!
-//! This library, like core, normally doesn’t need to be used directly
-//! since its contents are re-exported in the [`std` crate](../std/index.html).
-//! Crates that use the `#![no_std]` attribute however will typically
-//! not depend on `std`, so they’d use this crate instead.
-//!
-//! ## Boxed values
-//!
-//! The [`Box`] type is a smart pointer type. There can only be one owner of a
-//! [`Box`], and the owner can decide to mutate the contents, which live on the
-//! heap.
-//!
-//! This type can be sent among threads efficiently as the size of a `Box` value
-//! is the same as that of a pointer. Tree-like data structures are often built
-//! with boxes because each node often has only one owner, the parent.
-//!
-//! ## Reference counted pointers
-//!
-//! The [`Rc`] type is a non-threadsafe reference-counted pointer type intended
-//! for sharing memory within a thread. An [`Rc`] pointer wraps a type, `T`, and
-//! only allows access to `&T`, a shared reference.
-//!
-//! This type is useful when inherited mutability (such as using [`Box`]) is too
-//! constraining for an application, and is often paired with the [`Cell`] or
-//! [`RefCell`] types in order to allow mutation.
-//!
-//! ## Atomically reference counted pointers
-//!
-//! The [`Arc`] type is the threadsafe equivalent of the [`Rc`] type. It
-//! provides all the same functionality of [`Rc`], except it requires that the
-//! contained type `T` is shareable. Additionally, [`Arc<T>`][`Arc`] is itself
-//! sendable while [`Rc<T>`][`Rc`] is not.
-//!
-//! This type allows for shared access to the contained data, and is often
-//! paired with synchronization primitives such as mutexes to allow mutation of
-//! shared resources.
-//!
-//! ## Collections
-//!
-//! Implementations of the most common general purpose data structures are
-//! defined in this library. They are re-exported through the
-//! [standard collections library](../std/collections/index.html).
-//!
-//! ## Heap interfaces
-//!
-//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
-//! default global allocator. It is not compatible with the libc allocator API.
-//!
-//! [`Arc`]: sync
-//! [`Box`]: boxed
-//! [`Cell`]: core::cell
-//! [`Rc`]: rc
-//! [`RefCell`]: core::cell
-
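-// Minimal sketches of the pointer types described above (illustrative only;
-// whether `rc`/`sync` are compiled in depends on this crate's cfg options):
-//
-//     use alloc::boxed::Box;
-//     use alloc::rc::Rc;
-//     use alloc::sync::Arc;
-//
-//     let boxed = Box::new(5); // single owner of a heap value
-//     let rc = Rc::new(5);     // shared within one thread
-//     let arc = Arc::new(5);   // shareable across threads
-//     assert_eq!(*boxed + *rc + *arc, 15);
-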
-// To run alloc tests without x.py without ending up with two copies of alloc, Miri needs to be
-// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
-// rustc itself never sets the feature, so this line has no effect there.
-#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
-//
-#![allow(unused_attributes)]
-#![stable(feature = "alloc", since = "1.36.0")]
-#![doc(
- html_playground_url = "https://play.rust-lang.org/",
- issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
- test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
-)]
-#![doc(cfg_hide(
- not(test),
- not(any(test, bootstrap)),
- any(not(feature = "miri-test-libstd"), test, doctest),
- no_global_oom_handling,
- not(no_global_oom_handling),
- not(no_rc),
- not(no_sync),
- target_has_atomic = "ptr"
-))]
-#![doc(rust_logo)]
-#![feature(rustdoc_internals)]
-#![no_std]
-#![needs_allocator]
-// Lints:
-#![deny(unsafe_op_in_unsafe_fn)]
-#![deny(fuzzy_provenance_casts)]
-#![warn(deprecated_in_future)]
-#![warn(missing_debug_implementations)]
-#![warn(missing_docs)]
-#![allow(explicit_outlives_requirements)]
-#![warn(multiple_supertrait_upcastable)]
-#![allow(internal_features)]
-#![allow(rustdoc::redundant_explicit_links)]
-//
-// Library features:
-// tidy-alphabetical-start
-#![cfg_attr(not(no_global_oom_handling), feature(const_alloc_error))]
-#![cfg_attr(not(no_global_oom_handling), feature(const_btree_len))]
-#![cfg_attr(test, feature(is_sorted))]
-#![cfg_attr(test, feature(new_uninit))]
-#![feature(alloc_layout_extra)]
-#![feature(allocator_api)]
-#![feature(array_chunks)]
-#![feature(array_into_iter_constructors)]
-#![feature(array_methods)]
-#![feature(array_windows)]
-#![feature(ascii_char)]
-#![feature(assert_matches)]
-#![feature(async_iterator)]
-#![feature(coerce_unsized)]
-#![feature(const_align_of_val)]
-#![feature(const_box)]
-#![cfg_attr(not(no_borrow), feature(const_cow_is_borrowed))]
-#![feature(const_eval_select)]
-#![feature(const_maybe_uninit_as_mut_ptr)]
-#![feature(const_maybe_uninit_write)]
-#![feature(const_pin)]
-#![feature(const_refs_to_cell)]
-#![feature(const_size_of_val)]
-#![feature(const_waker)]
-#![feature(core_intrinsics)]
-#![feature(core_panic)]
-#![feature(deprecated_suggestion)]
-#![feature(dispatch_from_dyn)]
-#![feature(error_generic_member_access)]
-#![feature(error_in_core)]
-#![feature(exact_size_is_empty)]
-#![feature(extend_one)]
-#![feature(fmt_internals)]
-#![feature(fn_traits)]
-#![feature(hasher_prefixfree_extras)]
-#![feature(inline_const)]
-#![feature(inplace_iteration)]
-#![feature(iter_advance_by)]
-#![feature(iter_next_chunk)]
-#![feature(iter_repeat_n)]
-#![feature(layout_for_ptr)]
-#![feature(maybe_uninit_slice)]
-#![feature(maybe_uninit_uninit_array)]
-#![feature(maybe_uninit_uninit_array_transpose)]
-#![feature(pattern)]
-#![feature(ptr_internals)]
-#![feature(ptr_metadata)]
-#![feature(ptr_sub_ptr)]
-#![feature(receiver_trait)]
-#![feature(set_ptr_value)]
-#![feature(sized_type_properties)]
-#![feature(slice_from_ptr_range)]
-#![feature(slice_group_by)]
-#![feature(slice_ptr_get)]
-#![feature(slice_ptr_len)]
-#![feature(slice_range)]
-#![feature(std_internals)]
-#![feature(str_internals)]
-#![feature(strict_provenance)]
-#![feature(trusted_fused)]
-#![feature(trusted_len)]
-#![feature(trusted_random_access)]
-#![feature(try_trait_v2)]
-#![feature(tuple_trait)]
-#![feature(unchecked_math)]
-#![feature(unicode_internals)]
-#![feature(unsize)]
-#![feature(utf8_chunks)]
-// tidy-alphabetical-end
-//
-// Language features:
-// tidy-alphabetical-start
-#![cfg_attr(not(test), feature(coroutine_trait))]
-#![cfg_attr(test, feature(panic_update_hook))]
-#![cfg_attr(test, feature(test))]
-#![feature(allocator_internals)]
-#![feature(allow_internal_unstable)]
-#![feature(associated_type_bounds)]
-#![feature(c_unwind)]
-#![feature(cfg_sanitize)]
-#![feature(const_mut_refs)]
-#![feature(const_precise_live_drops)]
-#![feature(const_ptr_write)]
-#![feature(const_trait_impl)]
-#![feature(const_try)]
-#![feature(dropck_eyepatch)]
-#![feature(exclusive_range_pattern)]
-#![feature(fundamental)]
-#![feature(hashmap_internals)]
-#![feature(lang_items)]
-#![feature(min_specialization)]
-#![feature(multiple_supertrait_upcastable)]
-#![feature(negative_impls)]
-#![feature(never_type)]
-#![feature(pointer_is_aligned)]
-#![feature(rustc_allow_const_fn_unstable)]
-#![feature(rustc_attrs)]
-#![feature(slice_internals)]
-#![feature(staged_api)]
-#![feature(stmt_expr_attributes)]
-#![feature(unboxed_closures)]
-#![feature(unsized_fn_params)]
-#![feature(with_negative_coherence)]
-// tidy-alphabetical-end
-//
-// Rustdoc features:
-#![feature(doc_cfg)]
-#![feature(doc_cfg_hide)]
-// Technically, this is a bug in rustdoc: rustdoc sees the documentation on `#[lang = slice_alloc]`
-// blocks is for `&[T]`, which also has documentation using this feature in `core`, and gets mad
-// that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
-// from other crates, but since this can only appear for lang items, it doesn't seem worth fixing.
-#![feature(intra_doc_pointers)]
-
-// Allow testing this library
-#[cfg(test)]
-#[macro_use]
-extern crate std;
-#[cfg(test)]
-extern crate test;
-#[cfg(test)]
-mod testing;
-
-// Module with internal macros used by other modules (needs to be included before other modules).
-#[cfg(not(no_macros))]
-#[macro_use]
-mod macros;
-
-mod raw_vec;
-
-// Heaps provided for low-level allocation strategies
-
-pub mod alloc;
-
-// Primitive types using the heaps above
-
-// Need to conditionally define the mod from `boxed.rs` to avoid
-// duplicating the lang-items when building in test cfg; but also need
-// to allow code to have `use boxed::Box;` declarations.
-#[cfg(not(test))]
-pub mod boxed;
-#[cfg(test)]
-mod boxed {
- pub use std::boxed::Box;
-}
-#[cfg(not(no_borrow))]
-pub mod borrow;
-pub mod collections;
-#[cfg(all(not(no_rc), not(no_sync), not(no_global_oom_handling)))]
-pub mod ffi;
-#[cfg(not(no_fmt))]
-pub mod fmt;
-#[cfg(not(no_rc))]
-pub mod rc;
-pub mod slice;
-#[cfg(not(no_str))]
-pub mod str;
-#[cfg(not(no_string))]
-pub mod string;
-#[cfg(all(not(no_rc), not(no_sync), target_has_atomic = "ptr"))]
-pub mod sync;
-#[cfg(all(not(no_global_oom_handling), not(no_rc), not(no_sync), target_has_atomic = "ptr"))]
-pub mod task;
-#[cfg(test)]
-mod tests;
-pub mod vec;
-
-#[doc(hidden)]
-#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
-pub mod __export {
- pub use core::format_args;
-}
-
-#[cfg(test)]
-#[allow(dead_code)] // Not used in all configurations
-pub(crate) mod test_helpers {
- /// Copied from `std::test_helpers::test_rng`, since these tests rely on the
- /// seed not being the same for every RNG invocation too.
- pub(crate) fn test_rng() -> rand_xorshift::XorShiftRng {
- use std::hash::{BuildHasher, Hash, Hasher};
- let mut hasher = std::hash::RandomState::new().build_hasher();
- std::panic::Location::caller().hash(&mut hasher);
- let hc64 = hasher.finish();
- let seed_vec =
- hc64.to_le_bytes().into_iter().chain(0u8..8).collect::<crate::vec::Vec<u8>>();
- let seed: [u8; 16] = seed_vec.as_slice().try_into().unwrap();
- rand::SeedableRng::from_seed(seed)
- }
-}
diff --git a/rust/alloc/raw_vec.rs b/rust/alloc/raw_vec.rs
deleted file mode 100644
index 98b6abf30a..0000000000
--- a/rust/alloc/raw_vec.rs
+++ /dev/null
@@ -1,611 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-#![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")]
-
-use core::alloc::LayoutError;
-use core::cmp;
-use core::intrinsics;
-use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
-use core::ptr::{self, NonNull, Unique};
-use core::slice;
-
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::handle_alloc_error;
-use crate::alloc::{Allocator, Global, Layout};
-use crate::boxed::Box;
-use crate::collections::TryReserveError;
-use crate::collections::TryReserveErrorKind::*;
-
-#[cfg(test)]
-mod tests;
-
-enum AllocInit {
- /// The contents of the new memory are uninitialized.
- Uninitialized,
- /// The new memory is guaranteed to be zeroed.
- #[allow(dead_code)]
- Zeroed,
-}
-
-#[repr(transparent)]
-#[cfg_attr(target_pointer_width = "16", rustc_layout_scalar_valid_range_end(0x7fff))]
-#[cfg_attr(target_pointer_width = "32", rustc_layout_scalar_valid_range_end(0x7fff_ffff))]
-#[cfg_attr(target_pointer_width = "64", rustc_layout_scalar_valid_range_end(0x7fff_ffff_ffff_ffff))]
-struct Cap(usize);
-
-impl Cap {
- const ZERO: Cap = unsafe { Cap(0) };
-}
-
-/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
-/// a buffer of memory on the heap without having to worry about all the corner cases
-/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
-/// In particular:
-///
-/// * Produces `Unique::dangling()` on zero-sized types.
-/// * Produces `Unique::dangling()` on zero-length allocations.
-/// * Avoids freeing `Unique::dangling()`.
-/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
-/// * Guards against 32-bit systems allocating more than isize::MAX bytes.
-/// * Guards against overflowing your length.
-/// * Calls `handle_alloc_error` for fallible allocations.
-/// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
-/// * Uses the excess returned from the allocator to provide the largest available capacity.
-///
-/// This type does not in any way inspect the memory that it manages. When dropped it *will*
-/// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
-/// to handle the actual things *stored* inside of a `RawVec`.
-///
-/// Note that the excess of a zero-sized type is always infinite, so `capacity()` always returns
-/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
-/// `Box<[T]>`, since `capacity()` won't yield the length.
-#[allow(missing_debug_implementations)]
-pub(crate) struct RawVec<T, A: Allocator = Global> {
- ptr: Unique<T>,
- /// Never used for ZSTs; it's `capacity()`'s responsibility to return usize::MAX in that case.
- ///
- /// # Safety
- ///
- /// `cap` must be in the `0..=isize::MAX` range.
- cap: Cap,
- alloc: A,
-}
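-
-// A crate-internal usage sketch (not upstream text; `RawVec` is `pub(crate)`):
-// allocate, initialize every slot, then hand the buffer off as a boxed slice.
-//
-//     let buf: RawVec<u32> = RawVec::with_capacity(4);
-//     unsafe {
-//         for i in 0..4 {
-//             ptr::write(buf.ptr().add(i), i as u32);
-//         }
-//         // All 4 slots are initialized and the requested capacity was 4,
-//         // so converting into a boxed slice of length 4 is sound here.
-//         let boxed: Box<[u32]> = buf.into_box(4).assume_init();
-//         assert_eq!(&*boxed, &[0, 1, 2, 3]);
-//     }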
-
-impl<T> RawVec<T, Global> {
- /// HACK(Centril): This exists because stable `const fn` can only call stable `const fn`, so
- /// they cannot call `Self::new()`.
- ///
- /// If you change `RawVec<T>::new` or dependencies, please take care to not introduce anything
- /// that would truly const-call something unstable.
- pub const NEW: Self = Self::new();
-
- /// Creates the biggest possible `RawVec` (on the system heap)
- /// without allocating. If `T` has positive size, then this makes a
- /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
- /// `RawVec` with capacity `usize::MAX`. Useful for implementing
- /// delayed allocation.
- #[must_use]
- pub const fn new() -> Self {
- Self::new_in(Global)
- }
-
- /// Creates a `RawVec` (on the system heap) with exactly the
- /// capacity and alignment requirements for a `[T; capacity]`. This is
- /// equivalent to calling `RawVec::new` when `capacity` is `0` or `T` is
- /// zero-sized. Note that if `T` is zero-sized this means you will
- /// *not* get a `RawVec` with the requested capacity.
- ///
- /// # Panics
- ///
- /// Panics if the requested capacity exceeds `isize::MAX` bytes.
- ///
- /// # Aborts
- ///
- /// Aborts on OOM.
- #[cfg(not(any(no_global_oom_handling, test)))]
- #[must_use]
- #[inline]
- pub fn with_capacity(capacity: usize) -> Self {
- Self::with_capacity_in(capacity, Global)
- }
-
- /// Like `with_capacity`, but guarantees the buffer is zeroed.
- #[cfg(not(any(no_global_oom_handling, test)))]
- #[must_use]
- #[inline]
- pub fn with_capacity_zeroed(capacity: usize) -> Self {
- Self::with_capacity_zeroed_in(capacity, Global)
- }
-}
-
-impl<T, A: Allocator> RawVec<T, A> {
- // Tiny Vecs are dumb. Skip to:
- // - 8 if the element size is 1, because any heap allocator is likely
- // to round up a request of less than 8 bytes to at least 8 bytes.
- // - 4 if elements are moderate-sized (<= 1 KiB).
- // - 1 otherwise, to avoid wasting too much space for very short Vecs.
- pub(crate) const MIN_NON_ZERO_CAP: usize = if mem::size_of::<T>() == 1 {
- 8
- } else if mem::size_of::<T>() <= 1024 {
- 4
- } else {
- 1
- };
-
- /// Like `new`, but parameterized over the choice of allocator for
- /// the returned `RawVec`.
- pub const fn new_in(alloc: A) -> Self {
- // `cap: 0` means "unallocated". Zero-sized types are ignored.
- Self { ptr: Unique::dangling(), cap: Cap::ZERO, alloc }
- }
-
- /// Like `with_capacity`, but parameterized over the choice of
- /// allocator for the returned `RawVec`.
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
- Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
- }
-
- /// Like `try_with_capacity`, but parameterized over the choice of
- /// allocator for the returned `RawVec`.
- #[inline]
- pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
- Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc)
- }
-
- /// Like `with_capacity_zeroed`, but parameterized over the choice
- /// of allocator for the returned `RawVec`.
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
- Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
- }
-
- /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
- ///
- /// Note that this will correctly reconstitute any `cap` changes
- /// that may have been performed. (See description of type for details.)
- ///
- /// # Safety
- ///
- /// * `len` must be greater than or equal to the most recently requested capacity, and
- /// * `len` must be less than or equal to `self.capacity()`.
- ///
- /// Note that the requested capacity and `self.capacity()` could differ, as
- /// an allocator could overallocate and return a greater memory block than requested.
- pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
- // Sanity-check one half of the safety requirement (we cannot check the other half).
- debug_assert!(
- len <= self.capacity(),
- "`len` must be smaller than or equal to `self.capacity()`"
- );
-
- let me = ManuallyDrop::new(self);
- unsafe {
- let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
- Box::from_raw_in(slice, ptr::read(&me.alloc))
- }
- }
-
- #[cfg(not(no_global_oom_handling))]
- fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
- // Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
- if T::IS_ZST || capacity == 0 {
- Self::new_in(alloc)
- } else {
- // We avoid `unwrap_or_else` here because it bloats the amount of
- // LLVM IR generated.
- let layout = match Layout::array::<T>(capacity) {
- Ok(layout) => layout,
- Err(_) => capacity_overflow(),
- };
- match alloc_guard(layout.size()) {
- Ok(_) => {}
- Err(_) => capacity_overflow(),
- }
- let result = match init {
- AllocInit::Uninitialized => alloc.allocate(layout),
- AllocInit::Zeroed => alloc.allocate_zeroed(layout),
- };
- let ptr = match result {
- Ok(ptr) => ptr,
- Err(_) => handle_alloc_error(layout),
- };
-
- // Allocators currently return a `NonNull<[u8]>` whose length
- // matches the size requested. If that ever changes, the capacity
- // here should change to `ptr.len() / mem::size_of::<T>()`.
- Self {
- ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
- cap: unsafe { Cap(capacity) },
- alloc,
- }
- }
- }
-
- fn try_allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Result<Self, TryReserveError> {
- // Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
- if T::IS_ZST || capacity == 0 {
- return Ok(Self::new_in(alloc));
- }
-
- let layout = Layout::array::<T>(capacity).map_err(|_| CapacityOverflow)?;
- alloc_guard(layout.size())?;
- let result = match init {
- AllocInit::Uninitialized => alloc.allocate(layout),
- AllocInit::Zeroed => alloc.allocate_zeroed(layout),
- };
- let ptr = result.map_err(|_| AllocError { layout, non_exhaustive: () })?;
-
- // Allocators currently return a `NonNull<[u8]>` whose length
- // matches the size requested. If that ever changes, the capacity
- // here should change to `ptr.len() / mem::size_of::<T>()`.
- Ok(Self {
- ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
- cap: unsafe { Cap(capacity) },
- alloc,
- })
- }
-
- /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
- ///
- /// # Safety
- ///
- /// The `ptr` must have been allocated via the given allocator `alloc`, with the given
- /// `capacity`.
- /// The `capacity` cannot exceed `isize::MAX` for sized types (this is only a concern on
- /// 32-bit systems). For ZSTs, the `capacity` is ignored.
- /// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is
- /// guaranteed.
- #[inline]
- pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
- let cap = if T::IS_ZST { Cap::ZERO } else { unsafe { Cap(capacity) } };
- Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc }
- }
-
- /// Gets a raw pointer to the start of the allocation. Note that this is
- /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
- /// be careful.
- #[inline]
- pub fn ptr(&self) -> *mut T {
- self.ptr.as_ptr()
- }
-
- /// Gets the capacity of the allocation.
- ///
- /// This will always be `usize::MAX` if `T` is zero-sized.
- #[inline(always)]
- pub fn capacity(&self) -> usize {
- if T::IS_ZST { usize::MAX } else { self.cap.0 }
- }
-
- /// Returns a shared reference to the allocator backing this `RawVec`.
- pub fn allocator(&self) -> &A {
- &self.alloc
- }
-
- fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
- if T::IS_ZST || self.cap.0 == 0 {
- None
- } else {
- // We could use `Layout::array` here, which ensures the absence of isize and usize
- // overflows and could hypothetically handle differences between stride and size, but
- // this memory has already been allocated so we know it can't overflow, and Rust does
- // not currently support such types. So we can do better by skipping some checks and
- // avoiding an unwrap.
- let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
- unsafe {
- let align = mem::align_of::<T>();
- let size = mem::size_of::<T>().unchecked_mul(self.cap.0);
- let layout = Layout::from_size_align_unchecked(size, align);
- Some((self.ptr.cast().into(), layout))
- }
- }
- }
-
- /// Ensures that the buffer contains at least enough space to hold `len +
- /// additional` elements. If it doesn't already have enough capacity, will
- /// reallocate enough space plus comfortable slack space to get amortized
- /// *O*(1) behavior. Will limit this behavior if it would needlessly cause
- /// itself to panic.
- ///
- /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
- /// the requested space. This is not really unsafe, but the unsafe
- /// code *you* write that relies on the behavior of this function may break.
- ///
- /// This is ideal for implementing a bulk-push operation like `extend`.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Aborts
- ///
- /// Aborts on OOM.
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- pub fn reserve(&mut self, len: usize, additional: usize) {
- // Callers expect this function to be very cheap when there is already sufficient capacity.
- // Therefore, we move all the resizing and error-handling logic from grow_amortized and
- // handle_reserve behind a call, while making sure that this function is likely to be
- // inlined as just a comparison and a call if the comparison fails.
- #[cold]
- fn do_reserve_and_handle<T, A: Allocator>(
- slf: &mut RawVec<T, A>,
- len: usize,
- additional: usize,
- ) {
- handle_reserve(slf.grow_amortized(len, additional));
- }
-
- if self.needs_to_grow(len, additional) {
- do_reserve_and_handle(self, len, additional);
- }
- }
-
- /// A specialized version of `reserve()` used only by the hot and
- /// oft-instantiated `Vec::push()`, which does its own capacity check.
- #[cfg(not(no_global_oom_handling))]
- #[inline(never)]
- pub fn reserve_for_push(&mut self, len: usize) {
- handle_reserve(self.grow_amortized(len, 1));
- }
-
- /// The same as `reserve`, but returns on errors instead of panicking or aborting.
- pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
- if self.needs_to_grow(len, additional) {
- self.grow_amortized(len, additional)?;
- }
- unsafe {
- // Inform the optimizer that the reservation has succeeded or wasn't needed
- core::intrinsics::assume(!self.needs_to_grow(len, additional));
- }
- Ok(())
- }
-
- /// The same as `reserve_for_push`, but returns on errors instead of panicking or aborting.
- #[inline(never)]
- pub fn try_reserve_for_push(&mut self, len: usize) -> Result<(), TryReserveError> {
- self.grow_amortized(len, 1)
- }
-
- /// Ensures that the buffer contains at least enough space to hold `len +
- /// additional` elements. If it doesn't already, will reallocate the
- /// minimum possible amount of memory necessary. Generally this will be
- /// exactly the amount of memory necessary, but in principle the allocator
- /// is free to give back more than we asked for.
- ///
- /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
- /// the requested space. This is not really unsafe, but the unsafe code
- /// *you* write that relies on the behavior of this function may break.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Aborts
- ///
- /// Aborts on OOM.
- #[cfg(not(no_global_oom_handling))]
- pub fn reserve_exact(&mut self, len: usize, additional: usize) {
- handle_reserve(self.try_reserve_exact(len, additional));
- }
-
- /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
- pub fn try_reserve_exact(
- &mut self,
- len: usize,
- additional: usize,
- ) -> Result<(), TryReserveError> {
- if self.needs_to_grow(len, additional) {
- self.grow_exact(len, additional)?;
- }
- unsafe {
- // Inform the optimizer that the reservation has succeeded or wasn't needed
- core::intrinsics::assume(!self.needs_to_grow(len, additional));
- }
- Ok(())
- }
-
- /// Shrinks the buffer down to the specified capacity. If the given amount
- /// is 0, the buffer is deallocated entirely.
- ///
- /// # Panics
- ///
- /// Panics if the given amount is *larger* than the current capacity.
- ///
- /// # Aborts
- ///
- /// Aborts on OOM.
- #[cfg(not(no_global_oom_handling))]
- pub fn shrink_to_fit(&mut self, cap: usize) {
- handle_reserve(self.shrink(cap));
- }
-}
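-
-// A minimal sketch (not upstream code; names are illustrative) of how the fallible
-// reservation API above behaves: an impossible request reports an error instead of
-// aborting, while a reasonable request provides at least the space asked for.
-#[cfg(test)]
-mod try_reserve_sketch {
-    use super::RawVec;
-
-    #[test]
-    fn try_reserve_reports_errors() {
-        let mut buf: RawVec<u32> = RawVec::new();
-
-        // `usize::MAX` elements of `u32` cannot fit in a single allocation, so this fails.
-        assert!(buf.try_reserve(0, usize::MAX).is_err());
-
-        // A small request succeeds and guarantees room for `len + additional` elements.
-        assert!(buf.try_reserve(0, 10).is_ok());
-        assert!(buf.capacity() >= 10);
-    }
-}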
-
-impl<T, A: Allocator> RawVec<T, A> {
- /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
- /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
- fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
- additional > self.capacity().wrapping_sub(len)
- }
-
- /// # Safety
- ///
- /// `cap` must not exceed `isize::MAX`.
- unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
- // Allocators currently return a `NonNull<[u8]>` whose length matches
- // the size requested. If that ever changes, the capacity here should
- // change to `ptr.len() / mem::size_of::<T>()`.
- self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
- self.cap = unsafe { Cap(cap) };
- }
-
- // This method is usually instantiated many times. So we want it to be as
- // small as possible, to improve compile times. But we also want as much of
- // its contents to be statically computable as possible, to make the
- // generated code run faster. Therefore, this method is carefully written
- // so that all of the code that depends on `T` is within it, while as much
- // of the code that doesn't depend on `T` as possible is in functions that
- // are non-generic over `T`.
- fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
- // This is ensured by the calling contexts.
- debug_assert!(additional > 0);
-
- if T::IS_ZST {
- // Since we return a capacity of `usize::MAX` when `elem_size` is
- // 0, getting to here necessarily means the `RawVec` is overfull.
- return Err(CapacityOverflow.into());
- }
-
- // Nothing we can really do about these checks, sadly.
- let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
-
- // This guarantees exponential growth. The doubling cannot overflow
- // because `cap <= isize::MAX` and the type of `cap` is `usize`.
- let cap = cmp::max(self.cap.0 * 2, required_cap);
- let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);
-
- let new_layout = Layout::array::<T>(cap);
-
- // `finish_grow` is non-generic over `T`.
- let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
- // SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than isize::MAX items
- unsafe { self.set_ptr_and_cap(ptr, cap) };
- Ok(())
- }
-
- // The constraints on this method are much the same as those on
- // `grow_amortized`, but this method is usually instantiated less often so
- // it's less critical.
- fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
- if T::IS_ZST {
- // Since we return a capacity of `usize::MAX` when the type size is
- // 0, getting to here necessarily means the `RawVec` is overfull.
- return Err(CapacityOverflow.into());
- }
-
- let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
- let new_layout = Layout::array::<T>(cap);
-
- // `finish_grow` is non-generic over `T`.
- let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
- // SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than isize::MAX items
- unsafe {
- self.set_ptr_and_cap(ptr, cap);
- }
- Ok(())
- }
-
- #[cfg(not(no_global_oom_handling))]
- fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> {
- assert!(cap <= self.capacity(), "Tried to shrink to a larger capacity");
-
- let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
- // See current_memory() for why this assert is here
- let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
-
- // If shrinking to 0, deallocate the buffer. We don't reach this point
- // for the T::IS_ZST case since current_memory() will have returned
- // None.
- if cap == 0 {
- unsafe { self.alloc.deallocate(ptr, layout) };
- self.ptr = Unique::dangling();
- self.cap = Cap::ZERO;
- } else {
- let ptr = unsafe {
- // `Layout::array` cannot overflow here because it would have
- // overflowed earlier when capacity was larger.
- let new_size = mem::size_of::<T>().unchecked_mul(cap);
- let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
- self.alloc
- .shrink(ptr, layout, new_layout)
- .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
- };
- // SAFETY: if the allocation is valid, then the capacity is too
- unsafe {
- self.set_ptr_and_cap(ptr, cap);
- }
- }
- Ok(())
- }
-}
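-
-// A minimal sketch (not upstream code; names are illustrative) of the amortized growth
-// policy above: every reallocation at least doubles the capacity (and never goes below
-// `MIN_NON_ZERO_CAP`), so growing one element at a time reallocates only O(log n) times.
-#[cfg(test)]
-mod growth_policy_sketch {
-    use super::RawVec;
-
-    #[test]
-    fn amortized_growth_doubles_capacity() {
-        let mut buf: RawVec<u64> = RawVec::new();
-        let mut reallocations = 0;
-        let mut last_cap = buf.capacity();
-
-        for len in 0..1000 {
-            buf.try_reserve(len, 1).expect("allocation failed in sketch");
-            if buf.capacity() != last_cap {
-                // Each reallocation at least doubles the usable capacity.
-                assert!(buf.capacity() >= last_cap * 2);
-                last_cap = buf.capacity();
-                reallocations += 1;
-            }
-        }
-
-        // Roughly log2(1000) reallocations rather than one per element.
-        assert!(reallocations <= 10);
-    }
-}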
-
-// This function is outside `RawVec` to minimize compile times. See the comment
-// above `RawVec::grow_amortized` for details. (The `A` parameter isn't
-// significant, because the number of different `A` types seen in practice is
-// much smaller than the number of `T` types.)
-#[inline(never)]
-fn finish_grow<A>(
- new_layout: Result<Layout, LayoutError>,
- current_memory: Option<(NonNull<u8>, Layout)>,
- alloc: &mut A,
-) -> Result<NonNull<[u8]>, TryReserveError>
-where
- A: Allocator,
-{
- // Check for the error here to minimize the size of `RawVec::grow_*`.
- let new_layout = new_layout.map_err(|_| CapacityOverflow)?;
-
- alloc_guard(new_layout.size())?;
-
- let memory = if let Some((ptr, old_layout)) = current_memory {
- debug_assert_eq!(old_layout.align(), new_layout.align());
- unsafe {
- // The allocator checks for alignment equality
- intrinsics::assume(old_layout.align() == new_layout.align());
- alloc.grow(ptr, old_layout, new_layout)
- }
- } else {
- alloc.allocate(new_layout)
- };
-
- memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into())
-}
-
-unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
- /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
- fn drop(&mut self) {
- if let Some((ptr, layout)) = self.current_memory() {
- unsafe { self.alloc.deallocate(ptr, layout) }
- }
- }
-}
-
-// Central function for reserve error handling.
-#[cfg(not(no_global_oom_handling))]
-#[inline]
-fn handle_reserve(result: Result<(), TryReserveError>) {
- match result.map_err(|e| e.kind()) {
- Err(CapacityOverflow) => capacity_overflow(),
- Err(AllocError { layout, .. }) => handle_alloc_error(layout),
- Ok(()) => { /* yay */ }
- }
-}
-
-// We need to guarantee the following:
-// * We don't ever allocate `> isize::MAX` byte-size objects.
-// * We don't overflow `usize::MAX` and actually allocate too little.
-//
-// On 64-bit we just need to check for overflow since trying to allocate
-// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
-// an extra guard for this in case we're running on a platform which can use
-// all 4GB in user-space, e.g., PAE or x32.
-
-#[inline]
-fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
- if usize::BITS < 64 && alloc_size > isize::MAX as usize {
- Err(CapacityOverflow.into())
- } else {
- Ok(())
- }
-}
-
-// One central function responsible for reporting capacity overflows. This'll
-// ensure that the code generation related to these panics is minimal as there's
-// only one location which panics rather than a bunch throughout the module.
-#[cfg(not(no_global_oom_handling))]
-#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
-fn capacity_overflow() -> ! {
- panic!("capacity overflow");
-}
diff --git a/rust/alloc/slice.rs b/rust/alloc/slice.rs
deleted file mode 100644
index 1181836da5..0000000000
--- a/rust/alloc/slice.rs
+++ /dev/null
@@ -1,890 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-//! Utilities for the slice primitive type.
-//!
-//! *[See also the slice primitive type](slice).*
-//!
-//! Most of the structs in this module are iterator types which can only be created
-//! using a certain function. For example, `slice.iter()` yields an [`Iter`].
-//!
-//! A few functions are provided to create a slice from a value reference
-//! or from a raw pointer.
-#![stable(feature = "rust1", since = "1.0.0")]
-// Many of the `use` declarations in this module are only needed in the test configuration.
-// It's cleaner to just turn off the unused_imports warning than to fix them.
-#![cfg_attr(test, allow(unused_imports, dead_code))]
-
-use core::borrow::{Borrow, BorrowMut};
-#[cfg(not(no_global_oom_handling))]
-use core::cmp::Ordering::{self, Less};
-#[cfg(not(no_global_oom_handling))]
-use core::mem::{self, SizedTypeProperties};
-#[cfg(not(no_global_oom_handling))]
-use core::ptr;
-#[cfg(not(no_global_oom_handling))]
-use core::slice::sort;
-
-use crate::alloc::Allocator;
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::{self, Global};
-#[cfg(not(no_global_oom_handling))]
-use crate::borrow::ToOwned;
-use crate::boxed::Box;
-use crate::vec::Vec;
-
-#[cfg(test)]
-mod tests;
-
-#[unstable(feature = "slice_range", issue = "76393")]
-pub use core::slice::range;
-#[unstable(feature = "array_chunks", issue = "74985")]
-pub use core::slice::ArrayChunks;
-#[unstable(feature = "array_chunks", issue = "74985")]
-pub use core::slice::ArrayChunksMut;
-#[unstable(feature = "array_windows", issue = "75027")]
-pub use core::slice::ArrayWindows;
-#[stable(feature = "inherent_ascii_escape", since = "1.60.0")]
-pub use core::slice::EscapeAscii;
-#[stable(feature = "slice_get_slice", since = "1.28.0")]
-pub use core::slice::SliceIndex;
-#[stable(feature = "from_ref", since = "1.28.0")]
-pub use core::slice::{from_mut, from_ref};
-#[unstable(feature = "slice_from_ptr_range", issue = "89792")]
-pub use core::slice::{from_mut_ptr_range, from_ptr_range};
-#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::slice::{from_raw_parts, from_raw_parts_mut};
-#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::slice::{Chunks, Windows};
-#[stable(feature = "chunks_exact", since = "1.31.0")]
-pub use core::slice::{ChunksExact, ChunksExactMut};
-#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::slice::{ChunksMut, Split, SplitMut};
-#[unstable(feature = "slice_group_by", issue = "80552")]
-pub use core::slice::{GroupBy, GroupByMut};
-#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::slice::{Iter, IterMut};
-#[stable(feature = "rchunks", since = "1.31.0")]
-pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut};
-#[stable(feature = "slice_rsplit", since = "1.27.0")]
-pub use core::slice::{RSplit, RSplitMut};
-#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut};
-#[stable(feature = "split_inclusive", since = "1.51.0")]
-pub use core::slice::{SplitInclusive, SplitInclusiveMut};
-
-////////////////////////////////////////////////////////////////////////////////
-// Basic slice extension methods
-////////////////////////////////////////////////////////////////////////////////
-
-// HACK(japaric) needed for the implementation of `vec!` macro during testing
-// N.B., see the `hack` module in this file for more details.
-#[cfg(test)]
-pub use hack::into_vec;
-
-// HACK(japaric) needed for the implementation of `Vec::clone` during testing
-// N.B., see the `hack` module in this file for more details.
-#[cfg(test)]
-pub use hack::to_vec;
-
-// HACK(japaric): With cfg(test) `impl [T]` is not available, these three
-// functions are actually methods that are in `impl [T]` but not in
-// `core::slice::SliceExt` - we need to supply these functions for the
-// `test_permutations` test
-pub(crate) mod hack {
- use core::alloc::Allocator;
-
- use crate::boxed::Box;
- use crate::vec::Vec;
-
- // We shouldn't add an inline attribute to this since it is mostly used
- // by the `vec!` macro, where inlining causes a perf regression. See #71204
- // for discussion and perf results.
- pub fn into_vec<T, A: Allocator>(b: Box<[T], A>) -> Vec<T, A> {
- unsafe {
- let len = b.len();
- let (b, alloc) = Box::into_raw_with_allocator(b);
- Vec::from_raw_parts_in(b as *mut T, len, len, alloc)
- }
- }
-
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- pub fn to_vec<T: ConvertVec, A: Allocator>(s: &[T], alloc: A) -> Vec<T, A> {
- T::to_vec(s, alloc)
- }
-
- #[cfg(not(no_global_oom_handling))]
- pub trait ConvertVec {
- fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A>
- where
- Self: Sized;
- }
-
- #[cfg(not(no_global_oom_handling))]
- impl<T: Clone> ConvertVec for T {
- #[inline]
- default fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A> {
- struct DropGuard<'a, T, A: Allocator> {
- vec: &'a mut Vec<T, A>,
- num_init: usize,
- }
- impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> {
- #[inline]
- fn drop(&mut self) {
- // SAFETY:
- // items were marked initialized in the loop below
- unsafe {
- self.vec.set_len(self.num_init);
- }
- }
- }
- let mut vec = Vec::with_capacity_in(s.len(), alloc);
- let mut guard = DropGuard { vec: &mut vec, num_init: 0 };
- let slots = guard.vec.spare_capacity_mut();
- // .take(slots.len()) is necessary for LLVM to remove bounds checks
- // and has better codegen than zip.
- for (i, b) in s.iter().enumerate().take(slots.len()) {
- guard.num_init = i;
- slots[i].write(b.clone());
- }
- core::mem::forget(guard);
- // SAFETY:
- // the vec was allocated and initialized above to at least this length.
- unsafe {
- vec.set_len(s.len());
- }
- vec
- }
- }
-
- #[cfg(not(no_global_oom_handling))]
- impl<T: Copy> ConvertVec for T {
- #[inline]
- fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A> {
- let mut v = Vec::with_capacity_in(s.len(), alloc);
- // SAFETY:
- // allocated above with the capacity of `s`, and initialized to `s.len()` by the
- // `copy_to_nonoverlapping` call below.
- unsafe {
- s.as_ptr().copy_to_nonoverlapping(v.as_mut_ptr(), s.len());
- v.set_len(s.len());
- }
- v
- }
- }
-}
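-
-// A minimal sketch (not upstream code; names are illustrative) of what the `hack::into_vec`
-// conversion above does in practice: the boxed slice's allocation is handed to the `Vec`
-// as-is, so nothing is copied and `len == capacity ==` the original slice length.
-#[cfg(test)]
-mod into_vec_sketch {
-    use crate::boxed::Box;
-
-    #[test]
-    fn into_vec_reuses_the_allocation() {
-        let boxed: Box<[u32]> = Box::new([10, 40, 30]);
-        let ptr_before = boxed.as_ptr();
-
-        let v = super::hack::into_vec(boxed);
-
-        // Same buffer, same length, no spare capacity.
-        assert_eq!(v.as_ptr(), ptr_before);
-        assert_eq!(v.len(), 3);
-        assert_eq!(v.capacity(), 3);
-        assert_eq!(v, [10, 40, 30]);
-    }
-}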
-
-#[cfg(not(test))]
-impl<T> [T] {
- /// Sorts the slice.
- ///
- /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case.
- ///
- /// When applicable, unstable sorting is preferred because it is generally faster than stable
- /// sorting and it doesn't allocate auxiliary memory.
- /// See [`sort_unstable`](slice::sort_unstable).
- ///
- /// # Current implementation
- ///
- /// The current algorithm is an adaptive, iterative merge sort inspired by
- /// [timsort](https://en.wikipedia.org/wiki/Timsort).
- /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
- /// two or more sorted sequences concatenated one after another.
- ///
- /// Also, it allocates temporary storage half the size of `self`, but for short slices a
- /// non-allocating insertion sort is used instead.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = [-5, 4, 1, -3, 2];
- ///
- /// v.sort();
- /// assert!(v == [-5, -3, 1, 2, 4]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[inline]
- pub fn sort(&mut self)
- where
- T: Ord,
- {
- stable_sort(self, T::lt);
- }
-
- /// Sorts the slice with a comparator function.
- ///
- /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case.
- ///
- /// The comparator function must define a total ordering for the elements in the slice. If
- /// the ordering is not total, the order of the elements is unspecified. An order is a
- /// total order if it is (for all `a`, `b` and `c`):
- ///
- /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and
- /// * transitive: `a < b` and `b < c` imply `a < c`. The same must hold for both `==` and `>`.
- ///
- /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
- /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
- ///
- /// ```
- /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
- /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
- /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
- /// ```
- ///
- /// When applicable, unstable sorting is preferred because it is generally faster than stable
- /// sorting and it doesn't allocate auxiliary memory.
- /// See [`sort_unstable_by`](slice::sort_unstable_by).
- ///
- /// # Current implementation
- ///
- /// The current algorithm is an adaptive, iterative merge sort inspired by
- /// [timsort](https://en.wikipedia.org/wiki/Timsort).
- /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
- /// two or more sorted sequences concatenated one after another.
- ///
- /// Also, it allocates temporary storage half the size of `self`, but for short slices a
- /// non-allocating insertion sort is used instead.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = [5, 4, 1, 3, 2];
- /// v.sort_by(|a, b| a.cmp(b));
- /// assert!(v == [1, 2, 3, 4, 5]);
- ///
- /// // reverse sorting
- /// v.sort_by(|a, b| b.cmp(a));
- /// assert!(v == [5, 4, 3, 2, 1]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[inline]
- pub fn sort_by<F>(&mut self, mut compare: F)
- where
- F: FnMut(&T, &T) -> Ordering,
- {
- stable_sort(self, |a, b| compare(a, b) == Less);
- }
-
- /// Sorts the slice with a key extraction function.
- ///
- /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* \* log(*n*))
- /// worst-case, where the key function is *O*(*m*).
- ///
- /// For expensive key functions (e.g. functions that are not simple property accesses or
- /// basic operations), [`sort_by_cached_key`](slice::sort_by_cached_key) is likely to be
- /// significantly faster, as it does not recompute element keys.
- ///
- /// When applicable, unstable sorting is preferred because it is generally faster than stable
- /// sorting and it doesn't allocate auxiliary memory.
- /// See [`sort_unstable_by_key`](slice::sort_unstable_by_key).
- ///
- /// # Current implementation
- ///
- /// The current algorithm is an adaptive, iterative merge sort inspired by
- /// [timsort](https://en.wikipedia.org/wiki/Timsort).
- /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
- /// two or more sorted sequences concatenated one after another.
- ///
- /// Also, it allocates temporary storage half the size of `self`, but for short slices a
- /// non-allocating insertion sort is used instead.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = [-5i32, 4, 1, -3, 2];
- ///
- /// v.sort_by_key(|k| k.abs());
- /// assert!(v == [1, 2, -3, 4, -5]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "slice_sort_by_key", since = "1.7.0")]
- #[inline]
- pub fn sort_by_key<K, F>(&mut self, mut f: F)
- where
- F: FnMut(&T) -> K,
- K: Ord,
- {
- stable_sort(self, |a, b| f(a).lt(&f(b)));
- }
-
- /// Sorts the slice with a key extraction function.
- ///
- /// During sorting, the key function is called at most once per element, by using
- /// temporary storage to remember the results of key evaluation.
- /// The order of calls to the key function is unspecified and may change in future versions
- /// of the standard library.
- ///
- /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* + *n* \* log(*n*))
- /// worst-case, where the key function is *O*(*m*).
- ///
- /// For simple key functions (e.g., functions that are property accesses or
- /// basic operations), [`sort_by_key`](slice::sort_by_key) is likely to be
- /// faster.
- ///
- /// # Current implementation
- ///
- /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
- /// which combines the fast average case of randomized quicksort with the fast worst case of
- /// heapsort, while achieving linear time on slices with certain patterns. It uses some
- /// randomization to avoid degenerate cases, but with a fixed seed to always provide
- /// deterministic behavior.
- ///
- /// In the worst case, the algorithm allocates temporary storage in a `Vec<(K, usize)>` the
- /// length of the slice.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = [-5i32, 4, 32, -3, 2];
- ///
- /// v.sort_by_cached_key(|k| k.to_string());
- /// assert!(v == [-3, -5, 2, 32, 4]);
- /// ```
- ///
- /// [pdqsort]: https://github.com/orlp/pdqsort
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "slice_sort_by_cached_key", since = "1.34.0")]
- #[inline]
- pub fn sort_by_cached_key<K, F>(&mut self, f: F)
- where
- F: FnMut(&T) -> K,
- K: Ord,
- {
- // Helper macro for indexing our vector by the smallest possible type, to reduce allocation.
- macro_rules! sort_by_key {
- ($t:ty, $slice:ident, $f:ident) => {{
- let mut indices: Vec<_> =
- $slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect();
- // The elements of `indices` are unique, as they are indexed, so any sort will be
- // stable with respect to the original slice. We use `sort_unstable` here because
- // it requires less memory allocation.
- indices.sort_unstable();
- for i in 0..$slice.len() {
- let mut index = indices[i].1;
- while (index as usize) < i {
- index = indices[index as usize].1;
- }
- indices[i].1 = index;
- $slice.swap(i, index as usize);
- }
- }};
- }
-
- let sz_u8 = mem::size_of::<(K, u8)>();
- let sz_u16 = mem::size_of::<(K, u16)>();
- let sz_u32 = mem::size_of::<(K, u32)>();
- let sz_usize = mem::size_of::<(K, usize)>();
-
- let len = self.len();
- if len < 2 {
- return;
- }
- if sz_u8 < sz_u16 && len <= (u8::MAX as usize) {
- return sort_by_key!(u8, self, f);
- }
- if sz_u16 < sz_u32 && len <= (u16::MAX as usize) {
- return sort_by_key!(u16, self, f);
- }
- if sz_u32 < sz_usize && len <= (u32::MAX as usize) {
- return sort_by_key!(u32, self, f);
- }
- sort_by_key!(usize, self, f)
- }
-
- /// Copies `self` into a new `Vec`.
- ///
- /// # Examples
- ///
- /// ```
- /// let s = [10, 40, 30];
- /// let x = s.to_vec();
- /// // Here, `s` and `x` can be modified independently.
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[rustc_conversion_suggestion]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[inline]
- pub fn to_vec(&self) -> Vec<T>
- where
- T: Clone,
- {
- self.to_vec_in(Global)
- }
-
- /// Copies `self` into a new `Vec` with an allocator.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let s = [10, 40, 30];
- /// let x = s.to_vec_in(System);
- /// // Here, `s` and `x` can be modified independently.
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[inline]
- #[unstable(feature = "allocator_api", issue = "32838")]
- pub fn to_vec_in<A: Allocator>(&self, alloc: A) -> Vec<T, A>
- where
- T: Clone,
- {
- // N.B., see the `hack` module in this file for more details.
- hack::to_vec(self, alloc)
- }
-
- /// Converts `self` into a vector without clones or allocation.
- ///
- /// The resulting vector can be converted back into a box via
- /// `Vec<T>`'s `into_boxed_slice` method.
- ///
- /// # Examples
- ///
- /// ```
- /// let s: Box<[i32]> = Box::new([10, 40, 30]);
- /// let x = s.into_vec();
- /// // `s` cannot be used anymore because it has been converted into `x`.
- ///
- /// assert_eq!(x, vec![10, 40, 30]);
- /// ```
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[inline]
- pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
- // N.B., see the `hack` module in this file for more details.
- hack::into_vec(self)
- }
-
- /// Creates a vector by copying a slice `n` times.
- ///
- /// # Panics
- ///
- /// This function will panic if the capacity would overflow.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]);
- /// ```
- ///
- /// A panic upon overflow:
- ///
- /// ```should_panic
- /// // this will panic at runtime
- /// b"0123456789abcdef".repeat(usize::MAX);
- /// ```
- #[rustc_allow_incoherent_impl]
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "repeat_generic_slice", since = "1.40.0")]
- pub fn repeat(&self, n: usize) -> Vec<T>
- where
- T: Copy,
- {
- if n == 0 {
- return Vec::new();
- }
-
- // If `n` is larger than zero, it can be split as
- // `n = 2^expn + rem (2^expn > rem, expn >= 0, rem >= 0)`.
- // `2^expn` is the number represented by the leftmost '1' bit of `n`,
- // and `rem` is the remaining part of `n`.
-
- // Using `Vec` to access `set_len()`.
- let capacity = self.len().checked_mul(n).expect("capacity overflow");
- let mut buf = Vec::with_capacity(capacity);
-
- // `2^expn` repetition is done by doubling `buf` `expn`-times.
- buf.extend(self);
- {
- let mut m = n >> 1;
- // If `m > 0`, there are remaining bits up to the leftmost '1'.
- while m > 0 {
- // `buf.extend(buf)`:
- unsafe {
- ptr::copy_nonoverlapping(
- buf.as_ptr(),
- (buf.as_mut_ptr() as *mut T).add(buf.len()),
- buf.len(),
- );
- // `buf` has a capacity of `self.len() * n`.
- let buf_len = buf.len();
- buf.set_len(buf_len * 2);
- }
-
- m >>= 1;
- }
- }
-
- // `rem` (`= n - 2^expn`) repetition is done by copying
- // first `rem` repetitions from `buf` itself.
- let rem_len = capacity - buf.len(); // `self.len() * rem`
- if rem_len > 0 {
- // `buf.extend(buf[0 .. rem_len])`:
- unsafe {
- // This is non-overlapping since `2^expn > rem`.
- ptr::copy_nonoverlapping(
- buf.as_ptr(),
- (buf.as_mut_ptr() as *mut T).add(buf.len()),
- rem_len,
- );
- // `buf.len() + rem_len` equals `buf.capacity()` (`= self.len() * n`).
- buf.set_len(capacity);
- }
- }
- buf
- }
-
- /// Flattens a slice of `T` into a single value `Self::Output`.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(["hello", "world"].concat(), "helloworld");
- /// assert_eq!([[1, 2], [3, 4]].concat(), [1, 2, 3, 4]);
- /// ```
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn concat<Item: ?Sized>(&self) -> <Self as Concat<Item>>::Output
- where
- Self: Concat<Item>,
- {
- Concat::concat(self)
- }
-
- /// Flattens a slice of `T` into a single value `Self::Output`, placing a
- /// given separator between each.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(["hello", "world"].join(" "), "hello world");
- /// assert_eq!([[1, 2], [3, 4]].join(&0), [1, 2, 0, 3, 4]);
- /// assert_eq!([[1, 2], [3, 4]].join(&[0, 0][..]), [1, 2, 0, 0, 3, 4]);
- /// ```
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "rename_connect_to_join", since = "1.3.0")]
- pub fn join<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
- where
- Self: Join<Separator>,
- {
- Join::join(self, sep)
- }
-
- /// Flattens a slice of `T` into a single value `Self::Output`, placing a
- /// given separator between each.
- ///
- /// # Examples
- ///
- /// ```
- /// # #![allow(deprecated)]
- /// assert_eq!(["hello", "world"].connect(" "), "hello world");
- /// assert_eq!([[1, 2], [3, 4]].connect(&0), [1, 2, 0, 3, 4]);
- /// ```
- #[rustc_allow_incoherent_impl]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[deprecated(since = "1.3.0", note = "renamed to join", suggestion = "join")]
- pub fn connect<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
- where
- Self: Join<Separator>,
- {
- Join::join(self, sep)
- }
-}
-
-#[cfg(not(test))]
-impl [u8] {
- /// Returns a vector containing a copy of this slice where each byte
- /// is mapped to its ASCII upper case equivalent.
- ///
- /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
- /// but non-ASCII letters are unchanged.
- ///
- /// To uppercase the value in-place, use [`make_ascii_uppercase`].
- ///
- /// [`make_ascii_uppercase`]: slice::make_ascii_uppercase
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[must_use = "this returns the uppercase bytes as a new Vec, \
- without modifying the original"]
- #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
- #[inline]
- pub fn to_ascii_uppercase(&self) -> Vec<u8> {
- let mut me = self.to_vec();
- me.make_ascii_uppercase();
- me
- }
-
- /// Returns a vector containing a copy of this slice where each byte
- /// is mapped to its ASCII lower case equivalent.
- ///
- /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
- /// but non-ASCII letters are unchanged.
- ///
- /// To lowercase the value in-place, use [`make_ascii_lowercase`].
- ///
- /// [`make_ascii_lowercase`]: slice::make_ascii_lowercase
- #[cfg(not(no_global_oom_handling))]
- #[rustc_allow_incoherent_impl]
- #[must_use = "this returns the lowercase bytes as a new Vec, \
- without modifying the original"]
- #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
- #[inline]
- pub fn to_ascii_lowercase(&self) -> Vec<u8> {
- let mut me = self.to_vec();
- me.make_ascii_lowercase();
- me
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// Extension traits for slices over specific kinds of data
-////////////////////////////////////////////////////////////////////////////////
-
-/// Helper trait for [`[T]::concat`](slice::concat).
-///
-/// Note: the `Item` type parameter is not used in this trait,
-/// but it allows impls to be more generic.
-/// Without it, we get this error:
-///
-/// ```error
- /// error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predicates
-/// --> library/alloc/src/slice.rs:608:6
-/// |
-/// 608 | impl<T: Clone, V: Borrow<[T]>> Concat for [V] {
-/// | ^ unconstrained type parameter
-/// ```
-///
-/// This is because there could exist `V` types with multiple `Borrow<[_]>` impls,
-/// such that multiple `T` types would apply:
-///
-/// ```
-/// # #[allow(dead_code)]
-/// pub struct Foo(Vec<u32>, Vec<String>);
-///
-/// impl std::borrow::Borrow<[u32]> for Foo {
-/// fn borrow(&self) -> &[u32] { &self.0 }
-/// }
-///
-/// impl std::borrow::Borrow<[String]> for Foo {
-/// fn borrow(&self) -> &[String] { &self.1 }
-/// }
-/// ```
-#[unstable(feature = "slice_concat_trait", issue = "27747")]
-pub trait Concat<Item: ?Sized> {
- #[unstable(feature = "slice_concat_trait", issue = "27747")]
- /// The resulting type after concatenation
- type Output;
-
- /// Implementation of [`[T]::concat`](slice::concat)
- #[unstable(feature = "slice_concat_trait", issue = "27747")]
- fn concat(slice: &Self) -> Self::Output;
-}
-
-/// Helper trait for [`[T]::join`](slice::join)
-#[unstable(feature = "slice_concat_trait", issue = "27747")]
-pub trait Join<Separator> {
- #[unstable(feature = "slice_concat_trait", issue = "27747")]
- /// The resulting type after concatenation
- type Output;
-
- /// Implementation of [`[T]::join`](slice::join)
- #[unstable(feature = "slice_concat_trait", issue = "27747")]
- fn join(slice: &Self, sep: Separator) -> Self::Output;
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[unstable(feature = "slice_concat_ext", issue = "27747")]
-impl<T: Clone, V: Borrow<[T]>> Concat<T> for [V] {
- type Output = Vec<T>;
-
- fn concat(slice: &Self) -> Vec<T> {
- let size = slice.iter().map(|slice| slice.borrow().len()).sum();
- let mut result = Vec::with_capacity(size);
- for v in slice {
- result.extend_from_slice(v.borrow())
- }
- result
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[unstable(feature = "slice_concat_ext", issue = "27747")]
-impl<T: Clone, V: Borrow<[T]>> Join<&T> for [V] {
- type Output = Vec<T>;
-
- fn join(slice: &Self, sep: &T) -> Vec<T> {
- let mut iter = slice.iter();
- let first = match iter.next() {
- Some(first) => first,
- None => return vec![],
- };
- let size = slice.iter().map(|v| v.borrow().len()).sum::<usize>() + slice.len() - 1;
- let mut result = Vec::with_capacity(size);
- result.extend_from_slice(first.borrow());
-
- for v in iter {
- result.push(sep.clone());
- result.extend_from_slice(v.borrow())
- }
- result
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[unstable(feature = "slice_concat_ext", issue = "27747")]
-impl<T: Clone, V: Borrow<[T]>> Join<&[T]> for [V] {
- type Output = Vec<T>;
-
- fn join(slice: &Self, sep: &[T]) -> Vec<T> {
- let mut iter = slice.iter();
- let first = match iter.next() {
- Some(first) => first,
- None => return vec![],
- };
- let size =
- slice.iter().map(|v| v.borrow().len()).sum::<usize>() + sep.len() * (slice.len() - 1);
- let mut result = Vec::with_capacity(size);
- result.extend_from_slice(first.borrow());
-
- for v in iter {
- result.extend_from_slice(sep);
- result.extend_from_slice(v.borrow())
- }
- result
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// Standard trait implementations for slices
-////////////////////////////////////////////////////////////////////////////////
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> Borrow<[T]> for Vec<T, A> {
- fn borrow(&self) -> &[T] {
- &self[..]
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> BorrowMut<[T]> for Vec<T, A> {
- fn borrow_mut(&mut self) -> &mut [T] {
- &mut self[..]
- }
-}
-
-// Specializable trait for implementing ToOwned::clone_into. This is
-// public in the crate and has the Allocator parameter so that
-// vec::clone_from can use it too.
-#[cfg(not(no_global_oom_handling))]
-pub(crate) trait SpecCloneIntoVec<T, A: Allocator> {
- fn clone_into(&self, target: &mut Vec<T, A>);
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone, A: Allocator> SpecCloneIntoVec<T, A> for [T] {
- default fn clone_into(&self, target: &mut Vec<T, A>) {
- // drop anything in target that will not be overwritten
- target.truncate(self.len());
-
- // target.len <= self.len due to the truncate above, so the
- // slices here are always in-bounds.
- let (init, tail) = self.split_at(target.len());
-
- // reuse the contained values' allocations/resources.
- target.clone_from_slice(init);
- target.extend_from_slice(tail);
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy, A: Allocator> SpecCloneIntoVec<T, A> for [T] {
- fn clone_into(&self, target: &mut Vec<T, A>) {
- target.clear();
- target.extend_from_slice(self);
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> ToOwned for [T] {
- type Owned = Vec<T>;
- #[cfg(not(test))]
- fn to_owned(&self) -> Vec<T> {
- self.to_vec()
- }
-
- #[cfg(test)]
- fn to_owned(&self) -> Vec<T> {
- hack::to_vec(self, Global)
- }
-
- fn clone_into(&self, target: &mut Vec<T>) {
- SpecCloneIntoVec::clone_into(self, target);
- }
-}
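-
-// A minimal sketch (not upstream code; names are illustrative), assuming a configuration
-// with global OOM handling (these impls are compiled out under `no_global_oom_handling`):
-// the point of the `SpecCloneIntoVec` machinery above is that `ToOwned::clone_into` can
-// reuse the destination's existing allocation instead of building a fresh `Vec`.
-#[cfg(test)]
-mod clone_into_sketch {
-    use crate::borrow::ToOwned;
-    use crate::vec::Vec;
-
-    #[test]
-    fn clone_into_reuses_capacity() {
-        let src: &[u8] = &[1, 2, 3];
-
-        let mut dst: Vec<u8> = Vec::with_capacity(16);
-        dst.extend_from_slice(&[9; 10]);
-        let ptr_before = dst.as_ptr();
-
-        src.clone_into(&mut dst);
-
-        assert_eq!(dst, [1, 2, 3]);
-        // The existing buffer was large enough, so no reallocation happened.
-        assert_eq!(dst.as_ptr(), ptr_before);
-    }
-}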
-
-////////////////////////////////////////////////////////////////////////////////
-// Sorting
-////////////////////////////////////////////////////////////////////////////////
-
-#[inline]
-#[cfg(not(no_global_oom_handling))]
-fn stable_sort<T, F>(v: &mut [T], mut is_less: F)
-where
- F: FnMut(&T, &T) -> bool,
-{
- if T::IS_ZST {
- // Sorting has no meaningful behavior on zero-sized types. Do nothing.
- return;
- }
-
- let elem_alloc_fn = |len: usize| -> *mut T {
- // SAFETY: Creating the layout is safe as long as merge_sort never calls this with len >
- // v.len(). The allocation will in general only be used as a 'shadow region' to store
- // temporary swap elements.
- unsafe { alloc::alloc(alloc::Layout::array::<T>(len).unwrap_unchecked()) as *mut T }
- };
-
- let elem_dealloc_fn = |buf_ptr: *mut T, len: usize| {
- // SAFETY: Creating the layout is safe as long as merge_sort never calls this with len >
- // v.len(). The caller must ensure that buf_ptr was created by elem_alloc_fn with the same
- // len.
- unsafe {
- alloc::dealloc(buf_ptr as *mut u8, alloc::Layout::array::<T>(len).unwrap_unchecked());
- }
- };
-
- let run_alloc_fn = |len: usize| -> *mut sort::TimSortRun {
- // SAFETY: Creating the layout is safe as long as merge_sort never calls this with an
- // obscene length or 0.
- unsafe {
- alloc::alloc(alloc::Layout::array::<sort::TimSortRun>(len).unwrap_unchecked())
- as *mut sort::TimSortRun
- }
- };
-
- let run_dealloc_fn = |buf_ptr: *mut sort::TimSortRun, len: usize| {
- // SAFETY: The caller must ensure that buf_ptr was created by run_alloc_fn with the same
- // len.
- unsafe {
- alloc::dealloc(
- buf_ptr as *mut u8,
- alloc::Layout::array::<sort::TimSortRun>(len).unwrap_unchecked(),
- );
- }
- };
-
- sort::merge_sort(v, &mut is_less, elem_alloc_fn, elem_dealloc_fn, run_alloc_fn, run_dealloc_fn);
-}
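-
-// A minimal sketch (not upstream code; names are illustrative) of what "stable" buys you
-// in the sorts above: elements that compare equal keep their original relative order,
-// which matters when sorting by only part of the data.
-#[cfg(test)]
-mod stable_sort_sketch {
-    #[test]
-    fn equal_keys_keep_their_order() {
-        let mut pairs = vec![(2, 'a'), (1, 'b'), (2, 'c'), (1, 'd')];
-        pairs.sort_by_key(|&(key, _)| key);
-        // Within each key, the payloads stay in their original order.
-        assert_eq!(pairs, [(1, 'b'), (1, 'd'), (2, 'a'), (2, 'c')]);
-    }
-}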
diff --git a/rust/alloc/vec/drain.rs b/rust/alloc/vec/drain.rs
deleted file mode 100644
index 78177a9e2a..0000000000
--- a/rust/alloc/vec/drain.rs
+++ /dev/null
@@ -1,255 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-use crate::alloc::{Allocator, Global};
-use core::fmt;
-use core::iter::{FusedIterator, TrustedLen};
-use core::mem::{self, ManuallyDrop, SizedTypeProperties};
-use core::ptr::{self, NonNull};
-use core::slice::{self};
-
-use super::Vec;
-
-/// A draining iterator for `Vec<T>`.
-///
-/// This `struct` is created by [`Vec::drain`].
-/// See its documentation for more.
-///
-/// # Example
-///
-/// ```
-/// let mut v = vec![0, 1, 2];
-/// let iter: std::vec::Drain<'_, _> = v.drain(..);
-/// ```
-#[stable(feature = "drain", since = "1.6.0")]
-pub struct Drain<
- 'a,
- T: 'a,
- #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
-> {
- /// Index of tail to preserve
- pub(super) tail_start: usize,
- /// Length of tail
- pub(super) tail_len: usize,
- /// Current remaining range to remove
- pub(super) iter: slice::Iter<'a, T>,
- pub(super) vec: NonNull<Vec<T, A>>,
-}
-
-#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
- }
-}
-
-impl<'a, T, A: Allocator> Drain<'a, T, A> {
- /// Returns the remaining items of this iterator as a slice.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec!['a', 'b', 'c'];
- /// let mut drain = vec.drain(..);
- /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
- /// let _ = drain.next().unwrap();
- /// assert_eq!(drain.as_slice(), &['b', 'c']);
- /// ```
- #[must_use]
- #[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
- pub fn as_slice(&self) -> &[T] {
- self.iter.as_slice()
- }
-
- /// Returns a reference to the underlying allocator.
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[must_use]
- #[inline]
- pub fn allocator(&self) -> &A {
- unsafe { self.vec.as_ref().allocator() }
- }
-
- /// Keep unyielded elements in the source `Vec`.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(drain_keep_rest)]
- ///
- /// let mut vec = vec!['a', 'b', 'c'];
- /// let mut drain = vec.drain(..);
- ///
- /// assert_eq!(drain.next().unwrap(), 'a');
- ///
- /// // This call keeps 'b' and 'c' in the vec.
- /// drain.keep_rest();
- ///
- /// // If we hadn't called `keep_rest()`,
- /// // `vec` would be empty.
- /// assert_eq!(vec, ['b', 'c']);
- /// ```
- #[unstable(feature = "drain_keep_rest", issue = "101122")]
- pub fn keep_rest(self) {
- // At this moment layout looks like this:
- //
- // [head] [yielded by next] [unyielded] [yielded by next_back] [tail]
- // ^-- start \_________/-- unyielded_len \____/-- self.tail_len
- // ^-- unyielded_ptr ^-- tail
- //
- // Normally `Drop` impl would drop [unyielded] and then move [tail] to the `start`.
- // Here we want to
- // 1. Move [unyielded] to `start`
- // 2. Move [tail] to a new start at `start + len(unyielded)`
- // 3. Update length of the original vec to `len(head) + len(unyielded) + len(tail)`
- // a. In case of ZST, this is the only thing we want to do
- // 4. Do *not* drop self, as everything is already in a consistent state; there is nothing to do
- let mut this = ManuallyDrop::new(self);
-
- unsafe {
- let source_vec = this.vec.as_mut();
-
- let start = source_vec.len();
- let tail = this.tail_start;
-
- let unyielded_len = this.iter.len();
- let unyielded_ptr = this.iter.as_slice().as_ptr();
-
- // ZSTs have no identity, so we don't need to move them around.
- if !T::IS_ZST {
- let start_ptr = source_vec.as_mut_ptr().add(start);
-
- // memmove back unyielded elements
- if unyielded_ptr != start_ptr {
- let src = unyielded_ptr;
- let dst = start_ptr;
-
- ptr::copy(src, dst, unyielded_len);
- }
-
- // memmove back untouched tail
- if tail != (start + unyielded_len) {
- let src = source_vec.as_ptr().add(tail);
- let dst = start_ptr.add(unyielded_len);
- ptr::copy(src, dst, this.tail_len);
- }
- }
-
- source_vec.set_len(start + unyielded_len + this.tail_len);
- }
- }
-}
-
-#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
-impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
- fn as_ref(&self) -> &[T] {
- self.as_slice()
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-unsafe impl<T: Sync, A: Sync + Allocator> Sync for Drain<'_, T, A> {}
-#[stable(feature = "drain", since = "1.6.0")]
-unsafe impl<T: Send, A: Send + Allocator> Send for Drain<'_, T, A> {}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.iter.size_hint()
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
- }
-}
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T, A: Allocator> Drop for Drain<'_, T, A> {
- fn drop(&mut self) {
- /// Moves back the un-`Drain`ed elements to restore the original `Vec`.
- struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>);
-
- impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
- fn drop(&mut self) {
- if self.0.tail_len > 0 {
- unsafe {
- let source_vec = self.0.vec.as_mut();
- // memmove back untouched tail, update to new length
- let start = source_vec.len();
- let tail = self.0.tail_start;
- if tail != start {
- let src = source_vec.as_ptr().add(tail);
- let dst = source_vec.as_mut_ptr().add(start);
- ptr::copy(src, dst, self.0.tail_len);
- }
- source_vec.set_len(start + self.0.tail_len);
- }
- }
- }
- }
-
- let iter = mem::take(&mut self.iter);
- let drop_len = iter.len();
-
- let mut vec = self.vec;
-
- if T::IS_ZST {
- // ZSTs have no identity, so we don't need to move them around; we only need to drop the correct amount.
- // This can be achieved by manipulating the Vec length instead of moving values out of `iter`.
- unsafe {
- let vec = vec.as_mut();
- let old_len = vec.len();
- vec.set_len(old_len + drop_len + self.tail_len);
- vec.truncate(old_len + self.tail_len);
- }
-
- return;
- }
-
- // ensure elements are moved back into their appropriate places, even when drop_in_place panics
- let _guard = DropGuard(self);
-
- if drop_len == 0 {
- return;
- }
-
- // as_slice() must only be called when iter.len() > 0, because it also gets touched
- // by vec::Splice, which may turn it into a dangling pointer. That would make it and
- // the vec pointer point to different allocations, which would lead to invalid
- // pointer arithmetic below.
- let drop_ptr = iter.as_slice().as_ptr();
-
- unsafe {
- // drop_ptr comes from a slice::Iter which only gives us a &[T] but for drop_in_place
- // a pointer with mutable provenance is necessary. Therefore we must reconstruct
- // it from the original vec but also avoid creating a &mut to the front since that could
- // invalidate raw pointers to it which some unsafe code might rely on.
- let vec_ptr = vec.as_mut().as_mut_ptr();
- let drop_offset = drop_ptr.sub_ptr(vec_ptr);
- let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len);
- ptr::drop_in_place(to_drop);
- }
- }
-}
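-
-// A minimal sketch (not upstream code; names are illustrative) of the tail-preserving
-// behaviour implemented by the `Drop` impl above: dropping a partially consumed `Drain`
-// still removes the whole drained range, and the tail is shifted back so the `Vec`
-// stays contiguous.
-#[cfg(test)]
-mod drain_drop_sketch {
-    #[test]
-    fn partial_consumption_still_removes_the_range() {
-        let mut v = vec![0, 1, 2, 3, 4, 5];
-        {
-            let mut d = v.drain(1..4);
-            assert_eq!(d.next(), Some(1)); // consume only one element
-        } // `d` is dropped here; the unyielded 2 and 3 are dropped with it
-        assert_eq!(v, [0, 4, 5]);
-    }
-}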
-
-#[stable(feature = "drain", since = "1.6.0")]
-impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
- fn is_empty(&self) -> bool {
- self.iter.is_empty()
- }
-}
-
-#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<T, A: Allocator> TrustedLen for Drain<'_, T, A> {}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
diff --git a/rust/alloc/vec/extract_if.rs b/rust/alloc/vec/extract_if.rs
deleted file mode 100644
index f314a51d4d..0000000000
--- a/rust/alloc/vec/extract_if.rs
+++ /dev/null
@@ -1,115 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-use crate::alloc::{Allocator, Global};
-use core::ptr;
-use core::slice;
-
-use super::Vec;
-
-/// An iterator which uses a closure to determine if an element should be removed.
-///
-/// This struct is created by [`Vec::extract_if`].
-/// See its documentation for more.
-///
-/// # Example
-///
-/// ```
-/// #![feature(extract_if)]
-///
-/// let mut v = vec![0, 1, 2];
-/// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(|x| *x % 2 == 0);
-/// ```
-#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
-#[derive(Debug)]
-#[must_use = "iterators are lazy and do nothing unless consumed"]
-pub struct ExtractIf<
- 'a,
- T,
- F,
- #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
-> where
- F: FnMut(&mut T) -> bool,
-{
- pub(super) vec: &'a mut Vec<T, A>,
- /// The index of the item that will be inspected by the next call to `next`.
- pub(super) idx: usize,
- /// The number of items that have been drained (removed) thus far.
- pub(super) del: usize,
- /// The original length of `vec` prior to draining.
- pub(super) old_len: usize,
- /// The filter test predicate.
- pub(super) pred: F,
-}
-
-impl<T, F, A: Allocator> ExtractIf<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- /// Returns a reference to the underlying allocator.
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn allocator(&self) -> &A {
- self.vec.allocator()
- }
-}
-
-#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
-impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- type Item = T;
-
- fn next(&mut self) -> Option<T> {
- unsafe {
- while self.idx < self.old_len {
- let i = self.idx;
- let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
- let drained = (self.pred)(&mut v[i]);
- // Update the index *after* the predicate is called. If the index
- // is updated prior and the predicate panics, the element at this
- // index would be leaked.
- self.idx += 1;
- if drained {
- self.del += 1;
- return Some(ptr::read(&v[i]));
- } else if self.del > 0 {
- let del = self.del;
- let src: *const T = &v[i];
- let dst: *mut T = &mut v[i - del];
- ptr::copy_nonoverlapping(src, dst, 1);
- }
- }
- None
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (0, Some(self.old_len - self.idx))
- }
-}
-
-#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
-impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- fn drop(&mut self) {
- unsafe {
- if self.idx < self.old_len && self.del > 0 {
- // This is a pretty messed up state, and there isn't really an
- // obviously right thing to do. We don't want to keep trying
- // to execute `pred`, so we just backshift all the unprocessed
- // elements and tell the vec that they still exist. The backshift
- // is required to prevent a double-drop of the last successfully
- // drained item prior to a panic in the predicate.
- let ptr = self.vec.as_mut_ptr();
- let src = ptr.add(self.idx);
- let dst = src.sub(self.del);
- let tail_len = self.old_len - self.idx;
- src.copy_to(dst, tail_len);
- }
- self.vec.set_len(self.old_len - self.del);
- }
- }
-}
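-
-// A minimal sketch (not upstream code; names are illustrative) of typical `ExtractIf`
-// use and of the drop behaviour implemented above: extracted elements are moved out
-// through the iterator, while elements that were never visited stay in the source
-// `Vec` (this version does not keep filtering on drop).
-#[cfg(test)]
-mod extract_if_sketch {
-    use crate::vec::Vec;
-
-    #[test]
-    fn extracts_matching_elements() {
-        let mut v = vec![1, 2, 3, 4, 5, 6];
-        let evens: Vec<i32> = v.extract_if(|x| *x % 2 == 0).collect();
-        assert_eq!(evens, [2, 4, 6]);
-        assert_eq!(v, [1, 3, 5]);
-    }
-
-    #[test]
-    fn dropping_early_keeps_unvisited_elements() {
-        let mut v = vec![1, 2, 3, 4];
-        {
-            let mut it = v.extract_if(|x| *x % 2 == 0);
-            assert_eq!(it.next(), Some(2));
-        } // dropped after extracting only `2`; the rest is backshifted and kept
-        assert_eq!(v, [1, 3, 4]);
-    }
-}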
diff --git a/rust/alloc/vec/into_iter.rs b/rust/alloc/vec/into_iter.rs
deleted file mode 100644
index 136bfe94af..0000000000
--- a/rust/alloc/vec/into_iter.rs
+++ /dev/null
@@ -1,454 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-#[cfg(not(no_global_oom_handling))]
-use super::AsVecIntoIter;
-use crate::alloc::{Allocator, Global};
-#[cfg(not(no_global_oom_handling))]
-use crate::collections::VecDeque;
-use crate::raw_vec::RawVec;
-use core::array;
-use core::fmt;
-use core::iter::{
- FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
- TrustedRandomAccessNoCoerce,
-};
-use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
-use core::num::NonZeroUsize;
-#[cfg(not(no_global_oom_handling))]
-use core::ops::Deref;
-use core::ptr::{self, NonNull};
-use core::slice::{self};
-
-/// An iterator that moves out of a vector.
-///
-/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
-/// (provided by the [`IntoIterator`] trait).
-///
-/// # Example
-///
-/// ```
-/// let v = vec![0, 1, 2];
-/// let iter: std::vec::IntoIter<_> = v.into_iter();
-/// ```
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_insignificant_dtor]
-pub struct IntoIter<
- T,
- #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
-> {
- pub(super) buf: NonNull<T>,
- pub(super) phantom: PhantomData<T>,
- pub(super) cap: usize,
- // the drop impl reconstructs a RawVec from buf, cap and alloc
- // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
- pub(super) alloc: ManuallyDrop<A>,
- pub(super) ptr: *const T,
- pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
- // ptr == end is a quick test for the Iterator being empty, that works
- // for both ZST and non-ZST.
-}
-
-#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
-impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
- }
-}
-
-impl<T, A: Allocator> IntoIter<T, A> {
- /// Returns the remaining items of this iterator as a slice.
- ///
- /// # Examples
- ///
- /// ```
- /// let vec = vec!['a', 'b', 'c'];
- /// let mut into_iter = vec.into_iter();
- /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
- /// let _ = into_iter.next().unwrap();
- /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
- /// ```
- #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
- pub fn as_slice(&self) -> &[T] {
- unsafe { slice::from_raw_parts(self.ptr, self.len()) }
- }
-
- /// Returns the remaining items of this iterator as a mutable slice.
- ///
- /// # Examples
- ///
- /// ```
- /// let vec = vec!['a', 'b', 'c'];
- /// let mut into_iter = vec.into_iter();
- /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
- /// into_iter.as_mut_slice()[2] = 'z';
- /// assert_eq!(into_iter.next().unwrap(), 'a');
- /// assert_eq!(into_iter.next().unwrap(), 'b');
- /// assert_eq!(into_iter.next().unwrap(), 'z');
- /// ```
- #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
- pub fn as_mut_slice(&mut self) -> &mut [T] {
- unsafe { &mut *self.as_raw_mut_slice() }
- }
-
- /// Returns a reference to the underlying allocator.
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn allocator(&self) -> &A {
- &self.alloc
- }
-
- fn as_raw_mut_slice(&mut self) -> *mut [T] {
- ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
- }
-
- /// Drops remaining elements and relinquishes the backing allocation.
- /// This method guarantees it won't panic before relinquishing
- /// the backing allocation.
- ///
- /// This is roughly equivalent to the following, but more efficient
- ///
- /// ```
- /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
- /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
- /// (&mut into_iter).for_each(drop);
- /// std::mem::forget(into_iter);
- /// ```
- ///
- /// This method is used by in-place iteration, refer to the vec::in_place_collect
- /// documentation for an overview.
- #[cfg(not(no_global_oom_handling))]
- pub(super) fn forget_allocation_drop_remaining(&mut self) {
- let remaining = self.as_raw_mut_slice();
-
- // overwrite the individual fields instead of creating a new
- // struct and then overwriting &mut self.
- // this creates less assembly
- self.cap = 0;
- self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
- self.ptr = self.buf.as_ptr();
- self.end = self.buf.as_ptr();
-
- // Dropping the remaining elements can panic, so this needs to be
- // done only after updating the other fields.
- unsafe {
- ptr::drop_in_place(remaining);
- }
- }
-
- /// Forgets the remaining elements without dropping them, while still allowing the backing allocation to be freed.
- pub(crate) fn forget_remaining_elements(&mut self) {
- // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
- // `ptr` must stay aligned, while `end` may be unaligned.
- self.end = self.ptr;
- }
-
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- pub(crate) fn into_vecdeque(self) -> VecDeque<T, A> {
- // Keep our `Drop` impl from dropping the elements and the allocator
- let mut this = ManuallyDrop::new(self);
-
- // SAFETY: This allocation originally came from a `Vec`, so it passes
- // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
- // so the `sub_ptr`s below cannot wrap, and will produce a well-formed
- // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
- // Taking `alloc` is ok because nothing else is going to look at it,
- // since our `Drop` impl isn't going to run so there's no more code.
- unsafe {
- let buf = this.buf.as_ptr();
- let initialized = if T::IS_ZST {
- // All the pointers are the same for ZSTs, so it's fine to
- // say that they're all at the beginning of the "allocation".
- 0..this.len()
- } else {
- this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
- };
- let cap = this.cap;
- let alloc = ManuallyDrop::take(&mut this.alloc);
- VecDeque::from_contiguous_raw_parts_in(buf, initialized, cap, alloc)
- }
- }
-}
-
-#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
-impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
- fn as_ref(&self) -> &[T] {
- self.as_slice()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> Iterator for IntoIter<T, A> {
- type Item = T;
-
- #[inline]
- fn next(&mut self) -> Option<T> {
- if self.ptr == self.end {
- None
- } else if T::IS_ZST {
- // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
- // reducing the `end`.
- self.end = self.end.wrapping_byte_sub(1);
-
- // Make up a value of this ZST.
- Some(unsafe { mem::zeroed() })
- } else {
- let old = self.ptr;
- self.ptr = unsafe { self.ptr.add(1) };
-
- Some(unsafe { ptr::read(old) })
- }
- }
-
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- let exact = if T::IS_ZST {
- self.end.addr().wrapping_sub(self.ptr.addr())
- } else {
- unsafe { self.end.sub_ptr(self.ptr) }
- };
- (exact, Some(exact))
- }
-
- #[inline]
- fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
- let step_size = self.len().min(n);
- let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
- if T::IS_ZST {
- // See `next` for why we sub `end` here.
- self.end = self.end.wrapping_byte_sub(step_size);
- } else {
- // SAFETY: the min() above ensures that step_size is in bounds
- self.ptr = unsafe { self.ptr.add(step_size) };
- }
- // SAFETY: the min() above ensures that step_size is in bounds
- unsafe {
- ptr::drop_in_place(to_drop);
- }
- NonZeroUsize::new(n - step_size).map_or(Ok(()), Err)
- }
-
- #[inline]
- fn count(self) -> usize {
- self.len()
- }
-
- #[inline]
- fn next_chunk<const N: usize>(&mut self) -> Result<[T; N], core::array::IntoIter<T, N>> {
- let mut raw_ary = MaybeUninit::uninit_array();
-
- let len = self.len();
-
- if T::IS_ZST {
- if len < N {
- self.forget_remaining_elements();
- // Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
- return Err(unsafe { array::IntoIter::new_unchecked(raw_ary, 0..len) });
- }
-
- self.end = self.end.wrapping_byte_sub(N);
- // Safety: ditto
- return Ok(unsafe { raw_ary.transpose().assume_init() });
- }
-
- if len < N {
- // Safety: `len` indicates that this many elements are available and we just checked that
- // it fits into the array.
- unsafe {
- ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
- self.forget_remaining_elements();
- return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
- }
- }
-
- // Safety: `len` is at least the array size `N`. Copy a fixed amount here to fully
- // initialize the array.
- return unsafe {
- ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
- self.ptr = self.ptr.add(N);
- Ok(raw_ary.transpose().assume_init())
- };
- }
-
- unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
- where
- Self: TrustedRandomAccessNoCoerce,
- {
- // SAFETY: the caller must guarantee that `i` is in bounds of the
- // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
- // is guaranteed to point to an element of the `Vec<T>` and
- // thus guaranteed to be valid to dereference.
- //
- // Also note the implementation of `Self: TrustedRandomAccess` requires
- // that `T: Copy` so reading elements from the buffer doesn't invalidate
- // them for `Drop`.
- unsafe { if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
- #[inline]
- fn next_back(&mut self) -> Option<T> {
- if self.end == self.ptr {
- None
- } else if T::IS_ZST {
- // See above for why 'ptr.offset' isn't used
- self.end = self.end.wrapping_byte_sub(1);
-
- // Make up a value of this ZST.
- Some(unsafe { mem::zeroed() })
- } else {
- self.end = unsafe { self.end.sub(1) };
-
- Some(unsafe { ptr::read(self.end) })
- }
- }
-
- #[inline]
- fn advance_back_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
- let step_size = self.len().min(n);
- if T::IS_ZST {
- // SAFETY: same as for advance_by()
- self.end = self.end.wrapping_byte_sub(step_size);
- } else {
- // SAFETY: same as for advance_by()
- self.end = unsafe { self.end.sub(step_size) };
- }
- let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
- // SAFETY: same as for advance_by()
- unsafe {
- ptr::drop_in_place(to_drop);
- }
- NonZeroUsize::new(n - step_size).map_or(Ok(()), Err)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
- fn is_empty(&self) -> bool {
- self.ptr == self.end
- }
-}
-
-#[stable(feature = "fused", since = "1.26.0")]
-impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
-
-#[doc(hidden)]
-#[unstable(issue = "none", feature = "trusted_fused")]
-unsafe impl<T, A: Allocator> TrustedFused for IntoIter<T, A> {}
-
-#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
-
-#[stable(feature = "default_iters", since = "1.70.0")]
-impl<T, A> Default for IntoIter<T, A>
-where
- A: Allocator + Default,
-{
- /// Creates an empty `vec::IntoIter`.
- ///
- /// ```
- /// # use std::vec;
- /// let iter: vec::IntoIter<u8> = Default::default();
- /// assert_eq!(iter.len(), 0);
- /// assert_eq!(iter.as_slice(), &[]);
- /// ```
- fn default() -> Self {
- super::Vec::new_in(Default::default()).into_iter()
- }
-}
-
-#[doc(hidden)]
-#[unstable(issue = "none", feature = "std_internals")]
-#[rustc_unsafe_specialization_marker]
-pub trait NonDrop {}
-
-// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
-// and thus we can't implement drop-handling
-#[unstable(issue = "none", feature = "std_internals")]
-impl<T: Copy> NonDrop for T {}
-
-#[doc(hidden)]
-#[unstable(issue = "none", feature = "std_internals")]
-// TrustedRandomAccess (without NoCoerce) must not be implemented because
-// subtypes/supertypes of `T` might not be `NonDrop`
-unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
-where
- T: NonDrop,
-{
- const MAY_HAVE_SIDE_EFFECT: bool = false;
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
-impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
- #[cfg(not(test))]
- fn clone(&self) -> Self {
- self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
- }
- #[cfg(test)]
- fn clone(&self) -> Self {
- crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
- fn drop(&mut self) {
- struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);
-
- impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
- fn drop(&mut self) {
- unsafe {
- // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
- let alloc = ManuallyDrop::take(&mut self.0.alloc);
- // RawVec handles deallocation
- let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
- }
- }
- }
-
- let guard = DropGuard(self);
- // destroy the remaining elements
- unsafe {
- ptr::drop_in_place(guard.0.as_raw_mut_slice());
- }
- // now `guard` will be dropped and do the rest
- }
-}
-
-// In addition to the SAFETY invariants of the following three unsafe traits,
-// also refer to the vec::in_place_collect module documentation for an overview.
-#[unstable(issue = "none", feature = "inplace_iteration")]
-#[doc(hidden)]
-unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {
- const EXPAND_BY: Option<NonZeroUsize> = NonZeroUsize::new(1);
- const MERGE_BY: Option<NonZeroUsize> = NonZeroUsize::new(1);
-}
-
-#[unstable(issue = "none", feature = "inplace_iteration")]
-#[doc(hidden)]
-unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
- type Source = Self;
-
- #[inline]
- unsafe fn as_inner(&mut self) -> &mut Self::Source {
- self
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-unsafe impl<T> AsVecIntoIter for IntoIter<T> {
- type Item = T;
-
- fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
- self
- }
-}
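For orientation, a small stable-Rust sketch of the public `IntoIter` behaviour backed by the `ptr`/`end` pair described above:

fn main() {
    let v = vec!['a', 'b', 'c', 'd'];
    let mut it = v.into_iter();

    // `as_slice` exposes the not-yet-yielded elements (the `ptr..end` range).
    assert_eq!(it.as_slice(), &['a', 'b', 'c', 'd']);

    // `next` advances the front pointer...
    assert_eq!(it.next(), Some('a'));
    // ...and `next_back` pulls the back pointer in.
    assert_eq!(it.next_back(), Some('d'));

    assert_eq!(it.as_slice(), &['b', 'c']);
    assert_eq!(it.len(), 2); // ExactSizeIterator: number of remaining elements.
}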
diff --git a/rust/alloc/vec/is_zero.rs b/rust/alloc/vec/is_zero.rs
deleted file mode 100644
index d928dcf90e..0000000000
--- a/rust/alloc/vec/is_zero.rs
+++ /dev/null
@@ -1,204 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-use core::num::{Saturating, Wrapping};
-
-use crate::boxed::Box;
-
-#[rustc_specialization_trait]
-pub(super) unsafe trait IsZero {
- /// Whether this value's representation is all zeros,
- /// or can be represented with all zeros.
- fn is_zero(&self) -> bool;
-}
-
-macro_rules! impl_is_zero {
- ($t:ty, $is_zero:expr) => {
- unsafe impl IsZero for $t {
- #[inline]
- fn is_zero(&self) -> bool {
- $is_zero(*self)
- }
- }
- };
-}
-
-impl_is_zero!(i8, |x| x == 0); // Needed so that arrays and tuples of i8 are covered.
-impl_is_zero!(i16, |x| x == 0);
-impl_is_zero!(i32, |x| x == 0);
-impl_is_zero!(i64, |x| x == 0);
-impl_is_zero!(i128, |x| x == 0);
-impl_is_zero!(isize, |x| x == 0);
-
-impl_is_zero!(u8, |x| x == 0); // Needed so that arrays and tuples of u8 are covered.
-impl_is_zero!(u16, |x| x == 0);
-impl_is_zero!(u32, |x| x == 0);
-impl_is_zero!(u64, |x| x == 0);
-impl_is_zero!(u128, |x| x == 0);
-impl_is_zero!(usize, |x| x == 0);
-
-impl_is_zero!(bool, |x| x == false);
-impl_is_zero!(char, |x| x == '\0');
-
-impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
-impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
-
-unsafe impl<T> IsZero for *const T {
- #[inline]
- fn is_zero(&self) -> bool {
- (*self).is_null()
- }
-}
-
-unsafe impl<T> IsZero for *mut T {
- #[inline]
- fn is_zero(&self) -> bool {
- (*self).is_null()
- }
-}
-
-unsafe impl<T: IsZero, const N: usize> IsZero for [T; N] {
- #[inline]
- fn is_zero(&self) -> bool {
- // Because this is generated as a runtime check, it's not obvious that
- // it's worth doing if the array is really long. The threshold here
- // is largely arbitrary, but was picked because as of 2022-07-01 LLVM
- // fails to const-fold the check in `vec![[1; 32]; n]`
- // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022
- // Feel free to tweak if you have better evidence.
-
- N <= 16 && self.iter().all(IsZero::is_zero)
- }
-}
-
-// This is a recursive macro.
-macro_rules! impl_for_tuples {
- // Stopper
- () => {
- // No use implementing for the empty tuple because it is a ZST.
- };
- ($first_arg:ident $(,$rest:ident)*) => {
- unsafe impl<$first_arg: IsZero, $($rest: IsZero,)*> IsZero for ($first_arg, $($rest,)*) {
- #[inline]
- fn is_zero(&self) -> bool {
- // Destructure the tuple into N references.
- // Rust allows hiding generic params behind local variable names.
- #[allow(non_snake_case)]
- let ($first_arg, $($rest,)*) = self;
-
- $first_arg.is_zero()
- $( && $rest.is_zero() )*
- }
- }
-
- impl_for_tuples!($($rest),*);
- }
-}
-
-impl_for_tuples!(A, B, C, D, E, F, G, H);
-
-// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
-// For fat pointers, the bytes that would be the pointer metadata in the `Some`
-// variant are padding in the `None` variant, so ignoring them and
-// zero-initializing instead is ok.
-// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
-// `SpecFromElem`.
-
-unsafe impl<T: ?Sized> IsZero for Option<&T> {
- #[inline]
- fn is_zero(&self) -> bool {
- self.is_none()
- }
-}
-
-unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
- #[inline]
- fn is_zero(&self) -> bool {
- self.is_none()
- }
-}
-
-// `Option<num::NonZeroU32>` and similar have a representation guarantee that
-// they're the same size as the corresponding `u32` type, as well as a guarantee
-// that transmuting between `NonZeroU32` and `Option<num::NonZeroU32>` works.
-// While the documentation officially makes it UB to transmute from `None`,
-// we're the standard library so we can make extra inferences, and we know that
-// the only niche available to represent `None` is the one that's all zeros.
-
-macro_rules! impl_is_zero_option_of_nonzero {
- ($($t:ident,)+) => {$(
- unsafe impl IsZero for Option<core::num::$t> {
- #[inline]
- fn is_zero(&self) -> bool {
- self.is_none()
- }
- }
- )+};
-}
-
-impl_is_zero_option_of_nonzero!(
- NonZeroU8,
- NonZeroU16,
- NonZeroU32,
- NonZeroU64,
- NonZeroU128,
- NonZeroI8,
- NonZeroI16,
- NonZeroI32,
- NonZeroI64,
- NonZeroI128,
- NonZeroUsize,
- NonZeroIsize,
-);
-
-macro_rules! impl_is_zero_option_of_num {
- ($($t:ty,)+) => {$(
- unsafe impl IsZero for Option<$t> {
- #[inline]
- fn is_zero(&self) -> bool {
- const {
- let none: Self = unsafe { core::mem::MaybeUninit::zeroed().assume_init() };
- assert!(none.is_none());
- }
- self.is_none()
- }
- }
- )+};
-}
-
-impl_is_zero_option_of_num!(u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, usize, isize,);
-
-unsafe impl<T: IsZero> IsZero for Wrapping<T> {
- #[inline]
- fn is_zero(&self) -> bool {
- self.0.is_zero()
- }
-}
-
-unsafe impl<T: IsZero> IsZero for Saturating<T> {
- #[inline]
- fn is_zero(&self) -> bool {
- self.0.is_zero()
- }
-}
-
-macro_rules! impl_for_optional_bool {
- ($($t:ty,)+) => {$(
- unsafe impl IsZero for $t {
- #[inline]
- fn is_zero(&self) -> bool {
- // SAFETY: This is *not* a stable layout guarantee, but
- // inside `core` we're allowed to rely on the current rustc
- // behaviour that options of bools will be one byte with
- // no padding, so long as they're nested less than 254 deep.
- let raw: u8 = unsafe { core::mem::transmute(*self) };
- raw == 0
- }
- }
- )+};
-}
-impl_for_optional_bool! {
- Option<bool>,
- Option<Option<bool>>,
- Option<Option<Option<bool>>>,
- // Could go further, but not worth the metadata overhead
-}
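The `IsZero` trait above is `pub(super)` and only reachable through specialization, so it cannot be called directly; the sketch below merely illustrates the kinds of element values it classifies as all-zero, which is roughly what lets the `vec![value; n]` form take a zeroed-allocation fast path for them:

fn main() {
    // Each of these element values has an all-zero byte representation, so the
    // repeat form of `vec!` can request zeroed memory from the allocator
    // instead of writing every element individually.
    let zeros: Vec<u32> = vec![0; 1024];
    let nulls: Vec<Option<&u8>> = vec![None; 1024]; // `None::<&T>` uses the null niche.
    let pairs: Vec<(u8, u16)> = vec![(0, 0); 1024]; // tuples of zeros, via the recursive macro.

    assert!(zeros.iter().all(|&x| x == 0));
    assert!(nulls.iter().all(|x| x.is_none()));
    assert!(pairs.iter().all(|&(a, b)| a == 0 && b == 0));
}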
diff --git a/rust/alloc/vec/mod.rs b/rust/alloc/vec/mod.rs
deleted file mode 100644
index 220fb9d6f4..0000000000
--- a/rust/alloc/vec/mod.rs
+++ /dev/null
@@ -1,3683 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-//! A contiguous growable array type with heap-allocated contents, written
-//! `Vec<T>`.
-//!
-//! Vectors have *O*(1) indexing, amortized *O*(1) push (to the end) and
-//! *O*(1) pop (from the end).
-//!
-//! Vectors ensure they never allocate more than `isize::MAX` bytes.
-//!
-//! # Examples
-//!
-//! You can explicitly create a [`Vec`] with [`Vec::new`]:
-//!
-//! ```
-//! let v: Vec<i32> = Vec::new();
-//! ```
-//!
-//! ...or by using the [`vec!`] macro:
-//!
-//! ```
-//! let v: Vec<i32> = vec![];
-//!
-//! let v = vec![1, 2, 3, 4, 5];
-//!
-//! let v = vec![0; 10]; // ten zeroes
-//! ```
-//!
-//! You can [`push`] values onto the end of a vector (which will grow the vector
-//! as needed):
-//!
-//! ```
-//! let mut v = vec![1, 2];
-//!
-//! v.push(3);
-//! ```
-//!
-//! Popping values works in much the same way:
-//!
-//! ```
-//! let mut v = vec![1, 2];
-//!
-//! let two = v.pop();
-//! ```
-//!
-//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits):
-//!
-//! ```
-//! let mut v = vec![1, 2, 3];
-//! let three = v[2];
-//! v[1] = v[1] + 5;
-//! ```
-//!
-//! [`push`]: Vec::push
-
-#![stable(feature = "rust1", since = "1.0.0")]
-
-#[cfg(not(no_global_oom_handling))]
-use core::cmp;
-use core::cmp::Ordering;
-use core::fmt;
-use core::hash::{Hash, Hasher};
-use core::iter;
-use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
-use core::ops::{self, Index, IndexMut, Range, RangeBounds};
-use core::ptr::{self, NonNull};
-use core::slice::{self, SliceIndex};
-
-use crate::alloc::{Allocator, Global};
-#[cfg(not(no_borrow))]
-use crate::borrow::{Cow, ToOwned};
-use crate::boxed::Box;
-use crate::collections::{TryReserveError, TryReserveErrorKind};
-use crate::raw_vec::RawVec;
-
-#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
-pub use self::extract_if::ExtractIf;
-
-mod extract_if;
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "vec_splice", since = "1.21.0")]
-pub use self::splice::Splice;
-
-#[cfg(not(no_global_oom_handling))]
-mod splice;
-
-#[stable(feature = "drain", since = "1.6.0")]
-pub use self::drain::Drain;
-
-mod drain;
-
-#[cfg(not(no_borrow))]
-#[cfg(not(no_global_oom_handling))]
-mod cow;
-
-#[cfg(not(no_global_oom_handling))]
-pub(crate) use self::in_place_collect::AsVecIntoIter;
-#[stable(feature = "rust1", since = "1.0.0")]
-pub use self::into_iter::IntoIter;
-
-mod into_iter;
-
-#[cfg(not(no_global_oom_handling))]
-use self::is_zero::IsZero;
-
-#[cfg(not(no_global_oom_handling))]
-mod is_zero;
-
-#[cfg(not(no_global_oom_handling))]
-mod in_place_collect;
-
-mod partial_eq;
-
-#[cfg(not(no_global_oom_handling))]
-use self::spec_from_elem::SpecFromElem;
-
-#[cfg(not(no_global_oom_handling))]
-mod spec_from_elem;
-
-use self::set_len_on_drop::SetLenOnDrop;
-
-mod set_len_on_drop;
-
-#[cfg(not(no_global_oom_handling))]
-use self::in_place_drop::{InPlaceDrop, InPlaceDstDataSrcBufDrop};
-
-#[cfg(not(no_global_oom_handling))]
-mod in_place_drop;
-
-#[cfg(not(no_global_oom_handling))]
-use self::spec_from_iter_nested::SpecFromIterNested;
-
-#[cfg(not(no_global_oom_handling))]
-mod spec_from_iter_nested;
-
-#[cfg(not(no_global_oom_handling))]
-use self::spec_from_iter::SpecFromIter;
-
-#[cfg(not(no_global_oom_handling))]
-mod spec_from_iter;
-
-#[cfg(not(no_global_oom_handling))]
-use self::spec_extend::SpecExtend;
-
-use self::spec_extend::TrySpecExtend;
-
-mod spec_extend;
-
-/// A contiguous growable array type, written as `Vec<T>`, short for 'vector'.
-///
-/// # Examples
-///
-/// ```
-/// let mut vec = Vec::new();
-/// vec.push(1);
-/// vec.push(2);
-///
-/// assert_eq!(vec.len(), 2);
-/// assert_eq!(vec[0], 1);
-///
-/// assert_eq!(vec.pop(), Some(2));
-/// assert_eq!(vec.len(), 1);
-///
-/// vec[0] = 7;
-/// assert_eq!(vec[0], 7);
-///
-/// vec.extend([1, 2, 3]);
-///
-/// for x in &vec {
-/// println!("{x}");
-/// }
-/// assert_eq!(vec, [7, 1, 2, 3]);
-/// ```
-///
-/// The [`vec!`] macro is provided for convenient initialization:
-///
-/// ```
-/// let mut vec1 = vec![1, 2, 3];
-/// vec1.push(4);
-/// let vec2 = Vec::from([1, 2, 3, 4]);
-/// assert_eq!(vec1, vec2);
-/// ```
-///
-/// It can also initialize each element of a `Vec<T>` with a given value.
-/// This may be more efficient than performing allocation and initialization
-/// in separate steps, especially when initializing a vector of zeros:
-///
-/// ```
-/// let vec = vec![0; 5];
-/// assert_eq!(vec, [0, 0, 0, 0, 0]);
-///
-/// // The following is equivalent, but potentially slower:
-/// let mut vec = Vec::with_capacity(5);
-/// vec.resize(5, 0);
-/// assert_eq!(vec, [0, 0, 0, 0, 0]);
-/// ```
-///
-/// For more information, see
-/// [Capacity and Reallocation](#capacity-and-reallocation).
-///
-/// Use a `Vec<T>` as an efficient stack:
-///
-/// ```
-/// let mut stack = Vec::new();
-///
-/// stack.push(1);
-/// stack.push(2);
-/// stack.push(3);
-///
-/// while let Some(top) = stack.pop() {
-/// // Prints 3, 2, 1
-/// println!("{top}");
-/// }
-/// ```
-///
-/// # Indexing
-///
-/// The `Vec` type allows access to values by index, because it implements the
-/// [`Index`] trait. An example will be more explicit:
-///
-/// ```
-/// let v = vec![0, 2, 4, 6];
-/// println!("{}", v[1]); // it will display '2'
-/// ```
-///
- /// However, be careful: if you try to access an index which isn't in the
-/// your software will panic! You cannot do this:
-///
-/// ```should_panic
-/// let v = vec![0, 2, 4, 6];
-/// println!("{}", v[6]); // it will panic!
-/// ```
-///
-/// Use [`get`] and [`get_mut`] if you want to check whether the index is in
-/// the `Vec`.
-///
-/// # Slicing
-///
-/// A `Vec` can be mutable. On the other hand, slices are read-only objects.
-/// To get a [slice][prim@slice], use [`&`]. Example:
-///
-/// ```
-/// fn read_slice(slice: &[usize]) {
-/// // ...
-/// }
-///
-/// let v = vec![0, 1];
-/// read_slice(&v);
-///
-/// // ... and that's all!
-/// // you can also do it like this:
-/// let u: &[usize] = &v;
-/// // or like this:
-/// let u: &[_] = &v;
-/// ```
-///
-/// In Rust, it's more common to pass slices as arguments rather than vectors
-/// when you just want to provide read access. The same goes for [`String`] and
-/// [`&str`].
-///
-/// # Capacity and reallocation
-///
-/// The capacity of a vector is the amount of space allocated for any future
-/// elements that will be added onto the vector. This is not to be confused with
-/// the *length* of a vector, which specifies the number of actual elements
-/// within the vector. If a vector's length exceeds its capacity, its capacity
-/// will automatically be increased, but its elements will have to be
-/// reallocated.
-///
-/// For example, a vector with capacity 10 and length 0 would be an empty vector
-/// with space for 10 more elements. Pushing 10 or fewer elements onto the
-/// vector will not change its capacity or cause reallocation to occur. However,
-/// if the vector's length is increased to 11, it will have to reallocate, which
-/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`]
-/// whenever possible to specify how big the vector is expected to get.
-///
-/// # Guarantees
-///
-/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees
-/// about its design. This ensures that it's as low-overhead as possible in
-/// the general case, and can be correctly manipulated in primitive ways
-/// by unsafe code. Note that these guarantees refer to an unqualified `Vec<T>`.
-/// If additional type parameters are added (e.g., to support custom allocators),
-/// overriding their defaults may change the behavior.
-///
-/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length)
-/// triplet. No more, no less. The order of these fields is completely
-/// unspecified, and you should use the appropriate methods to modify these.
-/// The pointer will never be null, so this type is null-pointer-optimized.
-///
-/// However, the pointer might not actually point to allocated memory. In particular,
-/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`],
-/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`]
-/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
-/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
-/// the `Vec` might not report a [`capacity`] of 0*. `Vec` will allocate if and only
-/// if <code>[mem::size_of::\<T>]\() * [capacity]\() > 0</code>. In general, `Vec`'s allocation
-/// details are very subtle --- if you intend to allocate memory using a `Vec`
-/// and use it for something else (either to pass to unsafe code, or to build your
-/// own memory-backed collection), be sure to deallocate this memory by using
-/// `from_raw_parts` to recover the `Vec` and then dropping it.
-///
-/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap
-/// (as defined by the allocator Rust is configured to use by default), and its
-/// pointer points to [`len`] initialized, contiguous elements in order (what
-/// you would see if you coerced it to a slice), followed by <code>[capacity] - [len]</code>
-/// logically uninitialized, contiguous elements.
-///
-/// A vector containing the elements `'a'` and `'b'` with capacity 4 can be
-/// visualized as below. The top part is the `Vec` struct, it contains a
-/// pointer to the head of the allocation in the heap, length and capacity.
-/// The bottom part is the allocation on the heap, a contiguous memory block.
-///
-/// ```text
-/// ptr len capacity
-/// +--------+--------+--------+
-/// | 0x0123 | 2 | 4 |
-/// +--------+--------+--------+
-/// |
-/// v
-/// Heap +--------+--------+--------+--------+
-/// | 'a' | 'b' | uninit | uninit |
-/// +--------+--------+--------+--------+
-/// ```
-///
-/// - **uninit** represents memory that is not initialized, see [`MaybeUninit`].
-/// - Note: the ABI is not stable and `Vec` makes no guarantees about its memory
-/// layout (including the order of fields).
-///
-/// `Vec` will never perform a "small optimization" where elements are actually
-/// stored on the stack for two reasons:
-///
-/// * It would make it more difficult for unsafe code to correctly manipulate
-/// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were
-/// only moved, and it would be more difficult to determine if a `Vec` had
-/// actually allocated memory.
-///
-/// * It would penalize the general case, incurring an additional branch
-/// on every access.
-///
-/// `Vec` will never automatically shrink itself, even if completely empty. This
-/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec`
-/// and then filling it back up to the same [`len`] should incur no calls to
-/// the allocator. If you wish to free up unused memory, use
-/// [`shrink_to_fit`] or [`shrink_to`].
-///
-/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is
-/// sufficient. [`push`] and [`insert`] *will* (re)allocate if
-/// <code>[len] == [capacity]</code>. That is, the reported capacity is completely
-/// accurate, and can be relied on. It can even be used to manually free the memory
-/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even
-/// when not necessary.
-///
-/// `Vec` does not guarantee any particular growth strategy when reallocating
-/// when full, nor when [`reserve`] is called. The current strategy is basic
-/// and it may prove desirable to use a non-constant growth factor. Whatever
-/// strategy is used will of course guarantee *O*(1) amortized [`push`].
-///
- /// `vec![x; n]`, `vec![a, b, c, d]`, and
- /// [`Vec::with_capacity(n)`][`Vec::with_capacity`] will all produce a `Vec`
- /// with exactly the requested capacity. If <code>[len] == [capacity]</code>
- /// (as is the case for the [`vec!`] macro), then a `Vec<T>` can be converted to
- /// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements.
-///
-/// `Vec` will not specifically overwrite any data that is removed from it,
-/// but also won't specifically preserve it. Its uninitialized memory is
-/// scratch space that it may use however it wants. It will generally just do
-/// whatever is most efficient or otherwise easy to implement. Do not rely on
-/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
-/// buffer may simply be reused by another allocation. Even if you zero a `Vec`'s memory
-/// first, that might not actually happen because the optimizer does not consider
-/// this a side-effect that must be preserved. There is one case which we will
-/// not break, however: using `unsafe` code to write to the excess capacity,
-/// and then increasing the length to match, is always valid.
-///
-/// Currently, `Vec` does not guarantee the order in which elements are dropped.
-/// The order has changed in the past and may change again.
-///
-/// [`get`]: slice::get
-/// [`get_mut`]: slice::get_mut
-/// [`String`]: crate::string::String
-/// [`&str`]: type@str
-/// [`shrink_to_fit`]: Vec::shrink_to_fit
-/// [`shrink_to`]: Vec::shrink_to
-/// [capacity]: Vec::capacity
-/// [`capacity`]: Vec::capacity
-/// [mem::size_of::\<T>]: core::mem::size_of
-/// [len]: Vec::len
-/// [`len`]: Vec::len
-/// [`push`]: Vec::push
-/// [`insert`]: Vec::insert
-/// [`reserve`]: Vec::reserve
-/// [`MaybeUninit`]: core::mem::MaybeUninit
-/// [owned slice]: Box
-#[stable(feature = "rust1", since = "1.0.0")]
-#[cfg_attr(not(test), rustc_diagnostic_item = "Vec")]
-#[rustc_insignificant_dtor]
-pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
- buf: RawVec<T, A>,
- len: usize,
-}
-
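To make the Guarantees section above concrete, a short sketch of the one pattern it explicitly blesses: writing into the spare capacity with unsafe code and then raising the length to match (stable Rust, public `Vec` API only):

fn main() {
    let mut v: Vec<u32> = Vec::with_capacity(4);

    // A `Vec` is a (pointer, length, capacity) triplet: here len == 0 and
    // capacity >= 4, with room for `capacity` elements in the allocation.
    assert_eq!(v.len(), 0);
    assert!(v.capacity() >= 4);

    unsafe {
        let ptr = v.as_mut_ptr();
        // Write into the excess capacity...
        for i in 0..4 {
            ptr.add(i).write(i as u32);
        }
        // ...then increase the length to match, which the guarantees above
        // state is always valid.
        v.set_len(4);
    }

    assert_eq!(v, [0, 1, 2, 3]);
}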
-////////////////////////////////////////////////////////////////////////////////
-// Inherent methods
-////////////////////////////////////////////////////////////////////////////////
-
-impl<T> Vec<T> {
- /// Constructs a new, empty `Vec<T>`.
- ///
- /// The vector will not allocate until elements are pushed onto it.
- ///
- /// # Examples
- ///
- /// ```
- /// # #![allow(unused_mut)]
- /// let mut vec: Vec<i32> = Vec::new();
- /// ```
- #[inline]
- #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[must_use]
- pub const fn new() -> Self {
- Vec { buf: RawVec::NEW, len: 0 }
- }
-
- /// Constructs a new, empty `Vec<T>` with at least the specified capacity.
- ///
- /// The vector will be able to hold at least `capacity` elements without
- /// reallocating. This method is allowed to allocate for more elements than
- /// `capacity`. If `capacity` is 0, the vector will not allocate.
- ///
- /// It is important to note that although the returned vector has the
- /// minimum *capacity* specified, the vector will have a zero *length*. For
- /// an explanation of the difference between length and capacity, see
- /// *[Capacity and reallocation]*.
- ///
- /// If it is important to know the exact allocated capacity of a `Vec`,
- /// always use the [`capacity`] method after construction.
- ///
- /// For `Vec<T>` where `T` is a zero-sized type, there will be no allocation
- /// and the capacity will always be `usize::MAX`.
- ///
- /// [Capacity and reallocation]: #capacity-and-reallocation
- /// [`capacity`]: Vec::capacity
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = Vec::with_capacity(10);
- ///
- /// // The vector contains no items, even though it has capacity for more
- /// assert_eq!(vec.len(), 0);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // These are all done without reallocating...
- /// for i in 0..10 {
- /// vec.push(i);
- /// }
- /// assert_eq!(vec.len(), 10);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // ...but this may make the vector reallocate
- /// vec.push(11);
- /// assert_eq!(vec.len(), 11);
- /// assert!(vec.capacity() >= 11);
- ///
- /// // A vector of a zero-sized type will always over-allocate, since no
- /// // allocation is necessary
- /// let vec_units = Vec::<()>::with_capacity(10);
- /// assert_eq!(vec_units.capacity(), usize::MAX);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- #[must_use]
- pub fn with_capacity(capacity: usize) -> Self {
- Self::with_capacity_in(capacity, Global)
- }
-
- /// Tries to construct a new, empty `Vec<T>` with at least the specified capacity.
- ///
- /// The vector will be able to hold at least `capacity` elements without
- /// reallocating. This method is allowed to allocate for more elements than
- /// `capacity`. If `capacity` is 0, the vector will not allocate.
- ///
- /// It is important to note that although the returned vector has the
- /// minimum *capacity* specified, the vector will have a zero *length*. For
- /// an explanation of the difference between length and capacity, see
- /// *[Capacity and reallocation]*.
- ///
- /// If it is important to know the exact allocated capacity of a `Vec`,
- /// always use the [`capacity`] method after construction.
- ///
- /// For `Vec<T>` where `T` is a zero-sized type, there will be no allocation
- /// and the capacity will always be `usize::MAX`.
- ///
- /// [Capacity and reallocation]: #capacity-and-reallocation
- /// [`capacity`]: Vec::capacity
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = Vec::try_with_capacity(10).unwrap();
- ///
- /// // The vector contains no items, even though it has capacity for more
- /// assert_eq!(vec.len(), 0);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // These are all done without reallocating...
- /// for i in 0..10 {
- /// vec.push(i);
- /// }
- /// assert_eq!(vec.len(), 10);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // ...but this may make the vector reallocate
- /// vec.push(11);
- /// assert_eq!(vec.len(), 11);
- /// assert!(vec.capacity() >= 11);
- ///
- /// let mut result = Vec::try_with_capacity(usize::MAX);
- /// assert!(result.is_err());
- ///
- /// // A vector of a zero-sized type will always over-allocate, since no
- /// // allocation is necessary
- /// let vec_units = Vec::<()>::try_with_capacity(10).unwrap();
- /// assert_eq!(vec_units.capacity(), usize::MAX);
- /// ```
- #[inline]
- #[stable(feature = "kernel", since = "1.0.0")]
- pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
- Self::try_with_capacity_in(capacity, Global)
- }
-
- /// Creates a `Vec<T>` directly from a pointer, a capacity, and a length.
- ///
- /// # Safety
- ///
- /// This is highly unsafe, due to the number of invariants that aren't
- /// checked:
- ///
- /// * `ptr` must have been allocated using the global allocator, such as via
- /// the [`alloc::alloc`] function.
- /// * `T` needs to have the same alignment as what `ptr` was allocated with.
- /// (`T` having a less strict alignment is not sufficient, the alignment really
- /// needs to be equal to satisfy the [`dealloc`] requirement that memory must be
- /// allocated and deallocated with the same layout.)
- /// * The size of `T` times the `capacity` (i.e. the allocated size in bytes) needs
- /// to be the same size as the pointer was allocated with. (Because similar to
- /// alignment, [`dealloc`] must be called with the same layout `size`.)
- /// * `length` needs to be less than or equal to `capacity`.
- /// * The first `length` values must be properly initialized values of type `T`.
- /// * `capacity` needs to be the capacity that the pointer was allocated with.
- /// * The allocated size in bytes must be no larger than `isize::MAX`.
- /// See the safety documentation of [`pointer::offset`].
- ///
- /// These requirements are always upheld by any `ptr` that has been allocated
- /// via `Vec<T>`. Other allocation sources are allowed if the invariants are
- /// upheld.
- ///
- /// Violating these may cause problems like corrupting the allocator's
- /// internal data structures. For example, it is normally **not** safe
- /// to build a `Vec<u8>` from a pointer to a C `char` array with length
- /// `size_t`; doing so is only safe if the array was initially allocated by
- /// a `Vec` or `String`.
- /// It's also not safe to build one from a `Vec<u16>` and its length, because
- /// the allocator cares about the alignment, and these two types have different
- /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after
- /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1. To avoid
- /// these issues, it is often preferable to do casting/transmuting using
- /// [`slice::from_raw_parts`] instead.
- ///
- /// The ownership of `ptr` is effectively transferred to the
- /// `Vec<T>` which may then deallocate, reallocate or change the
- /// contents of memory pointed to by the pointer at will. Ensure
- /// that nothing else uses the pointer after calling this
- /// function.
- ///
- /// [`String`]: crate::string::String
- /// [`alloc::alloc`]: crate::alloc::alloc
- /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc
- ///
- /// # Examples
- ///
- /// ```
- /// use std::ptr;
- /// use std::mem;
- ///
- /// let v = vec![1, 2, 3];
- ///
- // FIXME Update this when vec_into_raw_parts is stabilized
- /// // Prevent running `v`'s destructor so we are in complete control
- /// // of the allocation.
- /// let mut v = mem::ManuallyDrop::new(v);
- ///
- /// // Pull out the various important pieces of information about `v`
- /// let p = v.as_mut_ptr();
- /// let len = v.len();
- /// let cap = v.capacity();
- ///
- /// unsafe {
- /// // Overwrite memory with 4, 5, 6
- /// for i in 0..len {
- /// ptr::write(p.add(i), 4 + i);
- /// }
- ///
- /// // Put everything back together into a Vec
- /// let rebuilt = Vec::from_raw_parts(p, len, cap);
- /// assert_eq!(rebuilt, [4, 5, 6]);
- /// }
- /// ```
- ///
- /// Using memory that was allocated elsewhere:
- ///
- /// ```rust
- /// use std::alloc::{alloc, Layout};
- ///
- /// fn main() {
- /// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
- ///
- /// let vec = unsafe {
- /// let mem = alloc(layout).cast::<u32>();
- /// if mem.is_null() {
- /// return;
- /// }
- ///
- /// mem.write(1_000_000);
- ///
- /// Vec::from_raw_parts(mem, 1, 16)
- /// };
- ///
- /// assert_eq!(vec, &[1_000_000]);
- /// assert_eq!(vec.capacity(), 16);
- /// }
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
- unsafe { Self::from_raw_parts_in(ptr, length, capacity, Global) }
- }
-}
-
-impl<T, A: Allocator> Vec<T, A> {
- /// Constructs a new, empty `Vec<T, A>`.
- ///
- /// The vector will not allocate until elements are pushed onto it.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// # #[allow(unused_mut)]
- /// let mut vec: Vec<i32, _> = Vec::new_in(System);
- /// ```
- #[inline]
- #[unstable(feature = "allocator_api", issue = "32838")]
- pub const fn new_in(alloc: A) -> Self {
- Vec { buf: RawVec::new_in(alloc), len: 0 }
- }
-
- /// Constructs a new, empty `Vec<T, A>` with at least the specified capacity
- /// with the provided allocator.
- ///
- /// The vector will be able to hold at least `capacity` elements without
- /// reallocating. This method is allowed to allocate for more elements than
- /// `capacity`. If `capacity` is 0, the vector will not allocate.
- ///
- /// It is important to note that although the returned vector has the
- /// minimum *capacity* specified, the vector will have a zero *length*. For
- /// an explanation of the difference between length and capacity, see
- /// *[Capacity and reallocation]*.
- ///
- /// If it is important to know the exact allocated capacity of a `Vec`,
- /// always use the [`capacity`] method after construction.
- ///
- /// For `Vec<T, A>` where `T` is a zero-sized type, there will be no allocation
- /// and the capacity will always be `usize::MAX`.
- ///
- /// [Capacity and reallocation]: #capacity-and-reallocation
- /// [`capacity`]: Vec::capacity
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let mut vec = Vec::with_capacity_in(10, System);
- ///
- /// // The vector contains no items, even though it has capacity for more
- /// assert_eq!(vec.len(), 0);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // These are all done without reallocating...
- /// for i in 0..10 {
- /// vec.push(i);
- /// }
- /// assert_eq!(vec.len(), 10);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // ...but this may make the vector reallocate
- /// vec.push(11);
- /// assert_eq!(vec.len(), 11);
- /// assert!(vec.capacity() >= 11);
- ///
- /// // A vector of a zero-sized type will always over-allocate, since no
- /// // allocation is necessary
- /// let vec_units = Vec::<(), System>::with_capacity_in(10, System);
- /// assert_eq!(vec_units.capacity(), usize::MAX);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[unstable(feature = "allocator_api", issue = "32838")]
- pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
- Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
- }
-
- /// Tries to construct a new, empty `Vec<T, A>` with at least the specified capacity
- /// with the provided allocator.
- ///
- /// The vector will be able to hold at least `capacity` elements without
- /// reallocating. This method is allowed to allocate for more elements than
- /// `capacity`. If `capacity` is 0, the vector will not allocate.
- ///
- /// It is important to note that although the returned vector has the
- /// minimum *capacity* specified, the vector will have a zero *length*. For
- /// an explanation of the difference between length and capacity, see
- /// *[Capacity and reallocation]*.
- ///
- /// If it is important to know the exact allocated capacity of a `Vec`,
- /// always use the [`capacity`] method after construction.
- ///
- /// For `Vec<T, A>` where `T` is a zero-sized type, there will be no allocation
- /// and the capacity will always be `usize::MAX`.
- ///
- /// [Capacity and reallocation]: #capacity-and-reallocation
- /// [`capacity`]: Vec::capacity
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// let mut vec = Vec::try_with_capacity_in(10, System).unwrap();
- ///
- /// // The vector contains no items, even though it has capacity for more
- /// assert_eq!(vec.len(), 0);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // These are all done without reallocating...
- /// for i in 0..10 {
- /// vec.push(i);
- /// }
- /// assert_eq!(vec.len(), 10);
- /// assert!(vec.capacity() >= 10);
- ///
- /// // ...but this may make the vector reallocate
- /// vec.push(11);
- /// assert_eq!(vec.len(), 11);
- /// assert!(vec.capacity() >= 11);
- ///
- /// let mut result = Vec::try_with_capacity_in(usize::MAX, System);
- /// assert!(result.is_err());
- ///
- /// // A vector of a zero-sized type will always over-allocate, since no
- /// // allocation is necessary
- /// let vec_units = Vec::<(), System>::try_with_capacity_in(10, System).unwrap();
- /// assert_eq!(vec_units.capacity(), usize::MAX);
- /// ```
- #[inline]
- #[stable(feature = "kernel", since = "1.0.0")]
- pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
- Ok(Vec { buf: RawVec::try_with_capacity_in(capacity, alloc)?, len: 0 })
- }
-
- /// Creates a `Vec<T, A>` directly from a pointer, a capacity, a length,
- /// and an allocator.
- ///
- /// # Safety
- ///
- /// This is highly unsafe, due to the number of invariants that aren't
- /// checked:
- ///
- /// * `ptr` must be [*currently allocated*] via the given allocator `alloc`.
- /// * `T` needs to have the same alignment as what `ptr` was allocated with.
- /// (`T` having a less strict alignment is not sufficient, the alignment really
- /// needs to be equal to satisfy the [`dealloc`] requirement that memory must be
- /// allocated and deallocated with the same layout.)
- /// * The size of `T` times the `capacity` (i.e. the allocated size in bytes) needs
- /// to be the same size as the pointer was allocated with. (Because similar to
- /// alignment, [`dealloc`] must be called with the same layout `size`.)
- /// * `length` needs to be less than or equal to `capacity`.
- /// * The first `length` values must be properly initialized values of type `T`.
- /// * `capacity` needs to [*fit*] the layout size that the pointer was allocated with.
- /// * The allocated size in bytes must be no larger than `isize::MAX`.
- /// See the safety documentation of [`pointer::offset`].
- ///
- /// These requirements are always upheld by any `ptr` that has been allocated
- /// via `Vec<T, A>`. Other allocation sources are allowed if the invariants are
- /// upheld.
- ///
- /// Violating these may cause problems like corrupting the allocator's
- /// internal data structures. For example, it is **not** safe
- /// to build a `Vec<u8>` from a pointer to a C `char` array with length `size_t`.
- /// It's also not safe to build one from a `Vec<u16>` and its length, because
- /// the allocator cares about the alignment, and these two types have different
- /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after
- /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1.
- ///
- /// The ownership of `ptr` is effectively transferred to the
- /// `Vec<T>` which may then deallocate, reallocate or change the
- /// contents of memory pointed to by the pointer at will. Ensure
- /// that nothing else uses the pointer after calling this
- /// function.
- ///
- /// [`String`]: crate::string::String
- /// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc
- /// [*currently allocated*]: crate::alloc::Allocator#currently-allocated-memory
- /// [*fit*]: crate::alloc::Allocator#memory-fitting
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// use std::ptr;
- /// use std::mem;
- ///
- /// let mut v = Vec::with_capacity_in(3, System);
- /// v.push(1);
- /// v.push(2);
- /// v.push(3);
- ///
- // FIXME Update this when vec_into_raw_parts is stabilized
- /// // Prevent running `v`'s destructor so we are in complete control
- /// // of the allocation.
- /// let mut v = mem::ManuallyDrop::new(v);
- ///
- /// // Pull out the various important pieces of information about `v`
- /// let p = v.as_mut_ptr();
- /// let len = v.len();
- /// let cap = v.capacity();
- /// let alloc = v.allocator();
- ///
- /// unsafe {
- /// // Overwrite memory with 4, 5, 6
- /// for i in 0..len {
- /// ptr::write(p.add(i), 4 + i);
- /// }
- ///
- /// // Put everything back together into a Vec
- /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone());
- /// assert_eq!(rebuilt, [4, 5, 6]);
- /// }
- /// ```
- ///
- /// Using memory that was allocated elsewhere:
- ///
- /// ```rust
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::{AllocError, Allocator, Global, Layout};
- ///
- /// fn main() {
- /// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
- ///
- /// let vec = unsafe {
- /// let mem = match Global.allocate(layout) {
- /// Ok(mem) => mem.cast::<u32>().as_ptr(),
- /// Err(AllocError) => return,
- /// };
- ///
- /// mem.write(1_000_000);
- ///
- /// Vec::from_raw_parts_in(mem, 1, 16, Global)
- /// };
- ///
- /// assert_eq!(vec, &[1_000_000]);
- /// assert_eq!(vec.capacity(), 16);
- /// }
- /// ```
- #[inline]
- #[unstable(feature = "allocator_api", issue = "32838")]
- pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
- unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } }
- }
-
- /// Decomposes a `Vec<T>` into its raw components.
- ///
- /// Returns the raw pointer to the underlying data, the length of
- /// the vector (in elements), and the allocated capacity of the
- /// data (in elements). These are the same arguments in the same
- /// order as the arguments to [`from_raw_parts`].
- ///
- /// After calling this function, the caller is responsible for the
- /// memory previously managed by the `Vec`. The only way to do
- /// this is to convert the raw pointer, length, and capacity back
- /// into a `Vec` with the [`from_raw_parts`] function, allowing
- /// the destructor to perform the cleanup.
- ///
- /// [`from_raw_parts`]: Vec::from_raw_parts
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(vec_into_raw_parts)]
- /// let v: Vec<i32> = vec![-1, 0, 1];
- ///
- /// let (ptr, len, cap) = v.into_raw_parts();
- ///
- /// let rebuilt = unsafe {
- /// // We can now make changes to the components, such as
- /// // transmuting the raw pointer to a compatible type.
- /// let ptr = ptr as *mut u32;
- ///
- /// Vec::from_raw_parts(ptr, len, cap)
- /// };
- /// assert_eq!(rebuilt, [4294967295, 0, 1]);
- /// ```
- #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
- pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
- let mut me = ManuallyDrop::new(self);
- (me.as_mut_ptr(), me.len(), me.capacity())
- }
-
- /// Decomposes a `Vec<T>` into its raw components.
- ///
- /// Returns the raw pointer to the underlying data, the length of the vector (in elements),
- /// the allocated capacity of the data (in elements), and the allocator. These are the same
- /// arguments in the same order as the arguments to [`from_raw_parts_in`].
- ///
- /// After calling this function, the caller is responsible for the
- /// memory previously managed by the `Vec`. The only way to do
- /// this is to convert the raw pointer, length, and capacity back
- /// into a `Vec` with the [`from_raw_parts_in`] function, allowing
- /// the destructor to perform the cleanup.
- ///
- /// [`from_raw_parts_in`]: Vec::from_raw_parts_in
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(allocator_api, vec_into_raw_parts)]
- ///
- /// use std::alloc::System;
- ///
- /// let mut v: Vec<i32, System> = Vec::new_in(System);
- /// v.push(-1);
- /// v.push(0);
- /// v.push(1);
- ///
- /// let (ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
- ///
- /// let rebuilt = unsafe {
- /// // We can now make changes to the components, such as
- /// // transmuting the raw pointer to a compatible type.
- /// let ptr = ptr as *mut u32;
- ///
- /// Vec::from_raw_parts_in(ptr, len, cap, alloc)
- /// };
- /// assert_eq!(rebuilt, [4294967295, 0, 1]);
- /// ```
- #[unstable(feature = "allocator_api", issue = "32838")]
- // #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
- pub fn into_raw_parts_with_alloc(self) -> (*mut T, usize, usize, A) {
- let mut me = ManuallyDrop::new(self);
- let len = me.len();
- let capacity = me.capacity();
- let ptr = me.as_mut_ptr();
- let alloc = unsafe { ptr::read(me.allocator()) };
- (ptr, len, capacity, alloc)
- }
-
- /// Returns the total number of elements the vector can hold without
- /// reallocating.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec: Vec<i32> = Vec::with_capacity(10);
- /// vec.push(42);
- /// assert!(vec.capacity() >= 10);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn capacity(&self) -> usize {
- self.buf.capacity()
- }
-
- /// Reserves capacity for at least `additional` more elements to be inserted
- /// in the given `Vec<T>`. The collection may reserve more space to
- /// speculatively avoid frequent reallocations. After calling `reserve`,
- /// capacity will be greater than or equal to `self.len() + additional`.
- /// Does nothing if capacity is already sufficient.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1];
- /// vec.reserve(10);
- /// assert!(vec.capacity() >= 11);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn reserve(&mut self, additional: usize) {
- self.buf.reserve(self.len, additional);
- }
-
- /// Reserves the minimum capacity for at least `additional` more elements to
- /// be inserted in the given `Vec<T>`. Unlike [`reserve`], this will not
- /// deliberately over-allocate to speculatively avoid frequent allocations.
- /// After calling `reserve_exact`, capacity will be greater than or equal to
- /// `self.len() + additional`. Does nothing if the capacity is already
- /// sufficient.
- ///
- /// Note that the allocator may give the collection more space than it
- /// requests. Therefore, capacity cannot be relied upon to be precisely
- /// minimal. Prefer [`reserve`] if future insertions are expected.
- ///
- /// [`reserve`]: Vec::reserve
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1];
- /// vec.reserve_exact(10);
- /// assert!(vec.capacity() >= 11);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn reserve_exact(&mut self, additional: usize) {
- self.buf.reserve_exact(self.len, additional);
- }
-
- /// Tries to reserve capacity for at least `additional` more elements to be inserted
- /// in the given `Vec<T>`. The collection may reserve more space to speculatively avoid
- /// frequent reallocations. After calling `try_reserve`, capacity will be
- /// greater than or equal to `self.len() + additional` if it returns
- /// `Ok(())`. Does nothing if capacity is already sufficient. This method
- /// preserves the contents even if an error occurs.
- ///
- /// # Errors
- ///
- /// If the capacity overflows, or the allocator reports a failure, then an error
- /// is returned.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::TryReserveError;
- ///
- /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
- /// let mut output = Vec::new();
- ///
- /// // Pre-reserve the memory, exiting if we can't
- /// output.try_reserve(data.len())?;
- ///
- /// // Now we know this can't OOM in the middle of our complex work
- /// output.extend(data.iter().map(|&val| {
- /// val * 2 + 5 // very complicated
- /// }));
- ///
- /// Ok(output)
- /// }
- /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
- /// ```
- #[stable(feature = "try_reserve", since = "1.57.0")]
- pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
- self.buf.try_reserve(self.len, additional)
- }
-
- /// Tries to reserve the minimum capacity for at least `additional`
- /// elements to be inserted in the given `Vec<T>`. Unlike [`try_reserve`],
- /// this will not deliberately over-allocate to speculatively avoid frequent
- /// allocations. After calling `try_reserve_exact`, capacity will be greater
- /// than or equal to `self.len() + additional` if it returns `Ok(())`.
- /// Does nothing if the capacity is already sufficient.
- ///
- /// Note that the allocator may give the collection more space than it
- /// requests. Therefore, capacity cannot be relied upon to be precisely
- /// minimal. Prefer [`try_reserve`] if future insertions are expected.
- ///
- /// [`try_reserve`]: Vec::try_reserve
- ///
- /// # Errors
- ///
- /// If the capacity overflows, or the allocator reports a failure, then an error
- /// is returned.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::TryReserveError;
- ///
- /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
- /// let mut output = Vec::new();
- ///
- /// // Pre-reserve the memory, exiting if we can't
- /// output.try_reserve_exact(data.len())?;
- ///
- /// // Now we know this can't OOM in the middle of our complex work
- /// output.extend(data.iter().map(|&val| {
- /// val * 2 + 5 // very complicated
- /// }));
- ///
- /// Ok(output)
- /// }
- /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
- /// ```
- #[stable(feature = "try_reserve", since = "1.57.0")]
- pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
- self.buf.try_reserve_exact(self.len, additional)
- }
-
- /// Shrinks the capacity of the vector as much as possible.
- ///
- /// It will drop down as close as possible to the length but the allocator
- /// may still inform the vector that there is space for a few more elements.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = Vec::with_capacity(10);
- /// vec.extend([1, 2, 3]);
- /// assert!(vec.capacity() >= 10);
- /// vec.shrink_to_fit();
- /// assert!(vec.capacity() >= 3);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn shrink_to_fit(&mut self) {
- // The capacity is never less than the length, and there's nothing to do when
- // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit`
- // by only calling it with a greater capacity.
- if self.capacity() > self.len {
- self.buf.shrink_to_fit(self.len);
- }
- }
-
- /// Shrinks the capacity of the vector with a lower bound.
- ///
- /// The capacity will remain at least as large as both the length
- /// and the supplied value.
- ///
- /// If the current capacity is less than the lower limit, this is a no-op.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = Vec::with_capacity(10);
- /// vec.extend([1, 2, 3]);
- /// assert!(vec.capacity() >= 10);
- /// vec.shrink_to(4);
- /// assert!(vec.capacity() >= 4);
- /// vec.shrink_to(0);
- /// assert!(vec.capacity() >= 3);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "shrink_to", since = "1.56.0")]
- pub fn shrink_to(&mut self, min_capacity: usize) {
- if self.capacity() > min_capacity {
- self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
- }
- }
-
- /// Converts the vector into [`Box<[T]>`][owned slice].
- ///
- /// If the vector has excess capacity, its items will be moved into a
- /// newly-allocated buffer with exactly the right capacity.
- ///
- /// [owned slice]: Box
- ///
- /// # Examples
- ///
- /// ```
- /// let v = vec![1, 2, 3];
- ///
- /// let slice = v.into_boxed_slice();
- /// ```
- ///
- /// Any excess capacity is removed:
- ///
- /// ```
- /// let mut vec = Vec::with_capacity(10);
- /// vec.extend([1, 2, 3]);
- ///
- /// assert!(vec.capacity() >= 10);
- /// let slice = vec.into_boxed_slice();
- /// assert_eq!(slice.into_vec().capacity(), 3);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn into_boxed_slice(mut self) -> Box<[T], A> {
- unsafe {
- self.shrink_to_fit();
- let me = ManuallyDrop::new(self);
- let buf = ptr::read(&me.buf);
- let len = me.len();
- buf.into_box(len).assume_init()
- }
- }
-
- /// Shortens the vector, keeping the first `len` elements and dropping
- /// the rest.
- ///
- /// If `len` is greater or equal to the vector's current length, this has
- /// no effect.
- ///
- /// The [`drain`] method can emulate `truncate`, but causes the excess
- /// elements to be returned instead of dropped.
- ///
- /// Note that this method has no effect on the allocated capacity
- /// of the vector.
- ///
- /// # Examples
- ///
- /// Truncating a five element vector to two elements:
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3, 4, 5];
- /// vec.truncate(2);
- /// assert_eq!(vec, [1, 2]);
- /// ```
- ///
- /// No truncation occurs when `len` is greater than the vector's current
- /// length:
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// vec.truncate(8);
- /// assert_eq!(vec, [1, 2, 3]);
- /// ```
- ///
- /// Truncating when `len == 0` is equivalent to calling the [`clear`]
- /// method.
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// vec.truncate(0);
- /// assert_eq!(vec, []);
- /// ```
- ///
- /// [`clear`]: Vec::clear
- /// [`drain`]: Vec::drain
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn truncate(&mut self, len: usize) {
- // This is safe because:
- //
- // * the slice passed to `drop_in_place` is valid; the `len > self.len`
- // case avoids creating an invalid slice, and
- // * the `len` of the vector is shrunk before calling `drop_in_place`,
- // such that no value will be dropped twice in case `drop_in_place`
- // were to panic once (if it panics twice, the program aborts).
- unsafe {
- // Note: It's intentional that this is `>` and not `>=`.
- // Changing it to `>=` has negative performance
- // implications in some cases. See #78884 for more.
- if len > self.len {
- return;
- }
- let remaining_len = self.len - len;
- let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
- self.len = len;
- ptr::drop_in_place(s);
- }
- }
-
- /// Extracts a slice containing the entire vector.
- ///
- /// Equivalent to `&s[..]`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::io::{self, Write};
- /// let buffer = vec![1, 2, 3, 5, 8];
- /// io::sink().write(buffer.as_slice()).unwrap();
- /// ```
- #[inline]
- #[stable(feature = "vec_as_slice", since = "1.7.0")]
- pub fn as_slice(&self) -> &[T] {
- self
- }
-
- /// Extracts a mutable slice of the entire vector.
- ///
- /// Equivalent to `&mut s[..]`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::io::{self, Read};
- /// let mut buffer = vec![0; 3];
- /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap();
- /// ```
- #[inline]
- #[stable(feature = "vec_as_slice", since = "1.7.0")]
- pub fn as_mut_slice(&mut self) -> &mut [T] {
- self
- }
-
- /// Returns a raw pointer to the vector's buffer, or a dangling raw pointer
- /// valid for zero sized reads if the vector didn't allocate.
- ///
- /// The caller must ensure that the vector outlives the pointer this
- /// function returns, or else it will end up pointing to garbage.
- /// Modifying the vector may cause its buffer to be reallocated,
- /// which would also make any pointers to it invalid.
- ///
- /// The caller must also ensure that the memory the pointer (non-transitively) points to
- /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
- /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
- ///
- /// This method guarantees that, for the purposes of the aliasing model, it does not
- /// materialize a reference to the underlying slice, and thus the returned pointer
- /// will remain valid when mixed with other calls to [`as_ptr`] and [`as_mut_ptr`].
- /// Note that calling other methods that materialize mutable references to the slice,
- /// or mutable references to specific elements you are planning on accessing through this pointer,
- /// as well as writing to those elements, may still invalidate this pointer.
- /// See the second example below for how this guarantee can be used.
- ///
- /// # Examples
- ///
- /// ```
- /// let x = vec![1, 2, 4];
- /// let x_ptr = x.as_ptr();
- ///
- /// unsafe {
- /// for i in 0..x.len() {
- /// assert_eq!(*x_ptr.add(i), 1 << i);
- /// }
- /// }
- /// ```
- ///
- /// Due to the aliasing guarantee, the following code is legal:
- ///
- /// ```rust
- /// unsafe {
- /// let mut v = vec![0, 1, 2];
- /// let ptr1 = v.as_ptr();
- /// let _ = ptr1.read();
- /// let ptr2 = v.as_mut_ptr().offset(2);
- /// ptr2.write(2);
- /// // Notably, the write to `ptr2` did *not* invalidate `ptr1`
- /// // because it mutated a different element:
- /// let _ = ptr1.read();
- /// }
- /// ```
- ///
- /// [`as_mut_ptr`]: Vec::as_mut_ptr
- /// [`as_ptr`]: Vec::as_ptr
- #[stable(feature = "vec_as_ptr", since = "1.37.0")]
- #[rustc_never_returns_null_ptr]
- #[inline]
- pub fn as_ptr(&self) -> *const T {
- // We shadow the slice method of the same name to avoid going through
- // `deref`, which creates an intermediate reference.
- self.buf.ptr()
- }
-
- /// Returns an unsafe mutable pointer to the vector's buffer, or a dangling
- /// raw pointer valid for zero sized reads if the vector didn't allocate.
- ///
- /// The caller must ensure that the vector outlives the pointer this
- /// function returns, or else it will end up pointing to garbage.
- /// Modifying the vector may cause its buffer to be reallocated,
- /// which would also make any pointers to it invalid.
- ///
- /// This method guarantees that, for the purposes of the aliasing model, it does not
- /// materialize a reference to the underlying slice, and thus the returned pointer
- /// will remain valid when mixed with other calls to [`as_ptr`] and [`as_mut_ptr`].
- /// Note that calling other methods that materialize references to the slice,
- /// or references to specific elements you are planning on accessing through this pointer,
- /// may still invalidate this pointer.
- /// See the second example below for how this guarantee can be used.
- ///
- /// # Examples
- ///
- /// ```
- /// // Allocate vector big enough for 4 elements.
- /// let size = 4;
- /// let mut x: Vec<i32> = Vec::with_capacity(size);
- /// let x_ptr = x.as_mut_ptr();
- ///
- /// // Initialize elements via raw pointer writes, then set length.
- /// unsafe {
- /// for i in 0..size {
- /// *x_ptr.add(i) = i as i32;
- /// }
- /// x.set_len(size);
- /// }
- /// assert_eq!(&*x, &[0, 1, 2, 3]);
- /// ```
- ///
- /// Due to the aliasing guarantee, the following code is legal:
- ///
- /// ```rust
- /// unsafe {
- /// let mut v = vec![0];
- /// let ptr1 = v.as_mut_ptr();
- /// ptr1.write(1);
- /// let ptr2 = v.as_mut_ptr();
- /// ptr2.write(2);
- /// // Notably, the write to `ptr2` did *not* invalidate `ptr1`:
- /// ptr1.write(3);
- /// }
- /// ```
- ///
- /// [`as_mut_ptr`]: Vec::as_mut_ptr
- /// [`as_ptr`]: Vec::as_ptr
- #[stable(feature = "vec_as_ptr", since = "1.37.0")]
- #[rustc_never_returns_null_ptr]
- #[inline]
- pub fn as_mut_ptr(&mut self) -> *mut T {
- // We shadow the slice method of the same name to avoid going through
- // `deref_mut`, which creates an intermediate reference.
- self.buf.ptr()
- }
-
- /// Returns a reference to the underlying allocator.
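- ///
- /// # Examples
- ///
- /// A minimal sketch, assuming the `allocator_api` feature and the
- /// `std::alloc::System` allocator:
- ///
- /// ```
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::System;
- ///
- /// // A vector created with an explicit allocator hands back a reference to it.
- /// let v: Vec<i32, System> = Vec::new_in(System);
- /// let _alloc: &System = v.allocator();
- /// ```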
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn allocator(&self) -> &A {
- self.buf.allocator()
- }
-
- /// Forces the length of the vector to `new_len`.
- ///
- /// This is a low-level operation that maintains none of the normal
- /// invariants of the type. Normally changing the length of a vector
- /// is done using one of the safe operations instead, such as
- /// [`truncate`], [`resize`], [`extend`], or [`clear`].
- ///
- /// [`truncate`]: Vec::truncate
- /// [`resize`]: Vec::resize
- /// [`extend`]: Extend::extend
- /// [`clear`]: Vec::clear
- ///
- /// # Safety
- ///
- /// - `new_len` must be less than or equal to [`capacity()`].
- /// - The elements at `old_len..new_len` must be initialized.
- ///
- /// [`capacity()`]: Vec::capacity
- ///
- /// # Examples
- ///
- /// This method can be useful for situations in which the vector
- /// is serving as a buffer for other code, particularly over FFI:
- ///
- /// ```no_run
- /// # #![allow(dead_code)]
- /// # // This is just a minimal skeleton for the doc example;
- /// # // don't use this as a starting point for a real library.
- /// # pub struct StreamWrapper { strm: *mut std::ffi::c_void }
- /// # const Z_OK: i32 = 0;
- /// # extern "C" {
- /// # fn deflateGetDictionary(
- /// # strm: *mut std::ffi::c_void,
- /// # dictionary: *mut u8,
- /// # dictLength: *mut usize,
- /// # ) -> i32;
- /// # }
- /// # impl StreamWrapper {
- /// pub fn get_dictionary(&self) -> Option<Vec<u8>> {
- /// // Per the FFI method's docs, "32768 bytes is always enough".
- /// let mut dict = Vec::with_capacity(32_768);
- /// let mut dict_length = 0;
- /// // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
- /// // 1. `dict_length` elements were initialized.
- /// // 2. `dict_length` <= the capacity (32_768)
- /// // which makes `set_len` safe to call.
- /// unsafe {
- /// // Make the FFI call...
- /// let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
- /// if r == Z_OK {
- /// // ...and update the length to what was initialized.
- /// dict.set_len(dict_length);
- /// Some(dict)
- /// } else {
- /// None
- /// }
- /// }
- /// }
- /// # }
- /// ```
- ///
- /// While the following example is sound, there is a memory leak since
- /// the inner vectors were not freed prior to the `set_len` call:
- ///
- /// ```
- /// let mut vec = vec![vec![1, 0, 0],
- /// vec![0, 1, 0],
- /// vec![0, 0, 1]];
- /// // SAFETY:
- /// // 1. `old_len..0` is empty so no elements need to be initialized.
- /// // 2. `0 <= capacity` always holds whatever `capacity` is.
- /// unsafe {
- /// vec.set_len(0);
- /// }
- /// ```
- ///
- /// Normally, here, one would use [`clear`] instead to correctly drop
- /// the contents and thus not leak memory.
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub unsafe fn set_len(&mut self, new_len: usize) {
- debug_assert!(new_len <= self.capacity());
-
- self.len = new_len;
- }
-
- /// Removes an element from the vector and returns it.
- ///
- /// The removed element is replaced by the last element of the vector.
- ///
- /// This does not preserve ordering, but is *O*(1).
- /// If you need to preserve the element order, use [`remove`] instead.
- ///
- /// [`remove`]: Vec::remove
- ///
- /// # Panics
- ///
- /// Panics if `index` is out of bounds.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = vec!["foo", "bar", "baz", "qux"];
- ///
- /// assert_eq!(v.swap_remove(1), "bar");
- /// assert_eq!(v, ["foo", "qux", "baz"]);
- ///
- /// assert_eq!(v.swap_remove(0), "foo");
- /// assert_eq!(v, ["baz", "qux"]);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn swap_remove(&mut self, index: usize) -> T {
- #[cold]
- #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
- #[track_caller]
- fn assert_failed(index: usize, len: usize) -> ! {
- panic!("swap_remove index (is {index}) should be < len (is {len})");
- }
-
- let len = self.len();
- if index >= len {
- assert_failed(index, len);
- }
- unsafe {
- // We replace self[index] with the last element. Note that if the
- // bounds check above succeeds there must be a last element (which
- // can be self[index] itself).
- let value = ptr::read(self.as_ptr().add(index));
- let base_ptr = self.as_mut_ptr();
- ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1);
- self.set_len(len - 1);
- value
- }
- }
-
- /// Inserts an element at position `index` within the vector, shifting all
- /// elements after it to the right.
- ///
- /// # Panics
- ///
- /// Panics if `index > len`.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// vec.insert(1, 4);
- /// assert_eq!(vec, [1, 4, 2, 3]);
- /// vec.insert(4, 5);
- /// assert_eq!(vec, [1, 4, 2, 3, 5]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(&mut self, index: usize, element: T) {
- #[cold]
- #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
- #[track_caller]
- fn assert_failed(index: usize, len: usize) -> ! {
- panic!("insertion index (is {index}) should be <= len (is {len})");
- }
-
- let len = self.len();
-
- // space for the new element
- if len == self.buf.capacity() {
- self.reserve(1);
- }
-
- unsafe {
- // infallible
- // The spot to put the new value
- {
- let p = self.as_mut_ptr().add(index);
- if index < len {
- // Shift everything over to make space. (Duplicating the
- // `index`th element into two consecutive places.)
- ptr::copy(p, p.add(1), len - index);
- } else if index == len {
- // No elements need shifting.
- } else {
- assert_failed(index, len);
- }
- // Write it in, overwriting the first copy of the `index`th
- // element.
- ptr::write(p, element);
- }
- self.set_len(len + 1);
- }
- }
-
- /// Removes and returns the element at position `index` within the vector,
- /// shifting all elements after it to the left.
- ///
- /// Note: Because this shifts over the remaining elements, it has a
- /// worst-case performance of *O*(*n*). If you don't need the order of elements
- /// to be preserved, use [`swap_remove`] instead. If you'd like to remove
- /// elements from the beginning of the `Vec`, consider using
- /// [`VecDeque::pop_front`] instead.
- ///
- /// [`swap_remove`]: Vec::swap_remove
- /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front
- ///
- /// # Panics
- ///
- /// Panics if `index` is out of bounds.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = vec![1, 2, 3];
- /// assert_eq!(v.remove(1), 2);
- /// assert_eq!(v, [1, 3]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- #[track_caller]
- pub fn remove(&mut self, index: usize) -> T {
- #[cold]
- #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
- #[track_caller]
- fn assert_failed(index: usize, len: usize) -> ! {
- panic!("removal index (is {index}) should be < len (is {len})");
- }
-
- let len = self.len();
- if index >= len {
- assert_failed(index, len);
- }
- unsafe {
- // infallible
- let ret;
- {
- // the place we are taking from.
- let ptr = self.as_mut_ptr().add(index);
- // copy it out, unsafely having a copy of the value on
- // the stack and in the vector at the same time.
- ret = ptr::read(ptr);
-
- // Shift everything down to fill in that spot.
- ptr::copy(ptr.add(1), ptr, len - index - 1);
- }
- self.set_len(len - 1);
- ret
- }
- }
-
- /// Retains only the elements specified by the predicate.
- ///
- /// In other words, remove all elements `e` for which `f(&e)` returns `false`.
- /// This method operates in place, visiting each element exactly once in the
- /// original order, and preserves the order of the retained elements.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3, 4];
- /// vec.retain(|&x| x % 2 == 0);
- /// assert_eq!(vec, [2, 4]);
- /// ```
- ///
- /// Because the elements are visited exactly once in the original order,
- /// external state may be used to decide which elements to keep.
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3, 4, 5];
- /// let keep = [false, true, true, false, true];
- /// let mut iter = keep.iter();
- /// vec.retain(|_| *iter.next().unwrap());
- /// assert_eq!(vec, [2, 3, 5]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn retain<F>(&mut self, mut f: F)
- where
- F: FnMut(&T) -> bool,
- {
- self.retain_mut(|elem| f(elem));
- }
-
- /// Retains only the elements specified by the predicate, passing a mutable reference to it.
- ///
- /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`.
- /// This method operates in place, visiting each element exactly once in the
- /// original order, and preserves the order of the retained elements.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3, 4];
- /// vec.retain_mut(|x| if *x <= 3 {
- /// *x += 1;
- /// true
- /// } else {
- /// false
- /// });
- /// assert_eq!(vec, [2, 3, 4]);
- /// ```
- #[stable(feature = "vec_retain_mut", since = "1.61.0")]
- pub fn retain_mut<F>(&mut self, mut f: F)
- where
- F: FnMut(&mut T) -> bool,
- {
- let original_len = self.len();
- // Avoid double drop if the drop guard is not executed,
- // since we may make some holes during the process.
- unsafe { self.set_len(0) };
-
- // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked]
- // |<- processed len ->| ^- next to check
- // |<- deleted cnt ->|
- // |<- original_len ->|
- // Kept: Elements for which the predicate returned `true`.
- // Hole: Moved or dropped element slot.
- // Unchecked: Valid elements that have not been checked yet.
- //
- // This drop guard will be invoked when the predicate or an element's `drop` panics.
- // It shifts the unchecked elements to cover the holes and calls `set_len` with the
- // correct length. In cases where the predicate and `drop` never panic, it will be
- // optimized out.
- struct BackshiftOnDrop<'a, T, A: Allocator> {
- v: &'a mut Vec<T, A>,
- processed_len: usize,
- deleted_cnt: usize,
- original_len: usize,
- }
-
- impl<T, A: Allocator> Drop for BackshiftOnDrop<'_, T, A> {
- fn drop(&mut self) {
- if self.deleted_cnt > 0 {
- // SAFETY: Trailing unchecked items must be valid since we never touch them.
- unsafe {
- ptr::copy(
- self.v.as_ptr().add(self.processed_len),
- self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt),
- self.original_len - self.processed_len,
- );
- }
- }
- // SAFETY: After filling holes, all items are in contiguous memory.
- unsafe {
- self.v.set_len(self.original_len - self.deleted_cnt);
- }
- }
- }
-
- let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len };
-
- fn process_loop<F, T, A: Allocator, const DELETED: bool>(
- original_len: usize,
- f: &mut F,
- g: &mut BackshiftOnDrop<'_, T, A>,
- ) where
- F: FnMut(&mut T) -> bool,
- {
- while g.processed_len != original_len {
- // SAFETY: Unchecked element must be valid.
- let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
- if !f(cur) {
- // Advance early to avoid double drop if `drop_in_place` panicked.
- g.processed_len += 1;
- g.deleted_cnt += 1;
- // SAFETY: We never touch this element again after it is dropped.
- unsafe { ptr::drop_in_place(cur) };
- // We already advanced the counter.
- if DELETED {
- continue;
- } else {
- break;
- }
- }
- if DELETED {
- // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with the current element.
- // We use copy for move, and never touch this element again.
- unsafe {
- let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
- ptr::copy_nonoverlapping(cur, hole_slot, 1);
- }
- }
- g.processed_len += 1;
- }
- }
-
- // Stage 1: Nothing was deleted.
- process_loop::<F, T, A, false>(original_len, &mut f, &mut g);
-
- // Stage 2: Some elements were deleted.
- process_loop::<F, T, A, true>(original_len, &mut f, &mut g);
-
- // All items are processed. This can be optimized to `set_len` by LLVM.
- drop(g);
- }
-
- /// Removes all but the first of consecutive elements in the vector that resolve to the same
- /// key.
- ///
- /// If the vector is sorted, this removes all duplicates.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![10, 20, 21, 30, 20];
- ///
- /// vec.dedup_by_key(|i| *i / 10);
- ///
- /// assert_eq!(vec, [10, 20, 30, 20]);
- /// ```
- #[stable(feature = "dedup_by", since = "1.16.0")]
- #[inline]
- pub fn dedup_by_key<F, K>(&mut self, mut key: F)
- where
- F: FnMut(&mut T) -> K,
- K: PartialEq,
- {
- self.dedup_by(|a, b| key(a) == key(b))
- }
-
- /// Removes all but the first of consecutive elements in the vector satisfying a given equality
- /// relation.
- ///
- /// The `same_bucket` function is passed references to two elements from the vector and
- /// must determine if the elements compare equal. The elements are passed in opposite order
- /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed.
- ///
- /// If the vector is sorted, this removes all duplicates.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
- ///
- /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
- ///
- /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
- /// ```
- #[stable(feature = "dedup_by", since = "1.16.0")]
- pub fn dedup_by<F>(&mut self, mut same_bucket: F)
- where
- F: FnMut(&mut T, &mut T) -> bool,
- {
- let len = self.len();
- if len <= 1 {
- return;
- }
-
- // Check if we ever want to remove anything.
- // This allows using `copy_nonoverlapping` in the next cycle,
- // and avoids any memory writes if we don't need to remove anything.
- let mut first_duplicate_idx: usize = 1;
- let start = self.as_mut_ptr();
- while first_duplicate_idx != len {
- let found_duplicate = unsafe {
- // SAFETY: `first_duplicate_idx` is always in the range [1..len).
- // Note that we start iteration from 1 so we never overflow.
- let prev = start.add(first_duplicate_idx.wrapping_sub(1));
- let current = start.add(first_duplicate_idx);
- // We explicitly say in docs that references are reversed.
- same_bucket(&mut *current, &mut *prev)
- };
- if found_duplicate {
- break;
- }
- first_duplicate_idx += 1;
- }
- // Nothing needs to be removed: no duplicate was found.
- // (`first_duplicate_idx` never exceeds `len`.)
- if first_duplicate_idx == len {
- return;
- }
-
- /* INVARIANT: vec.len() > read > write > write-1 >= 0 */
- struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> {
- /* Offset of the element we want to check if it is duplicate */
- read: usize,
-
- /* Offset of the place where we want to place the non-duplicate
- * when we find it. */
- write: usize,
-
- /* The Vec that would need correction if `same_bucket` panicked */
- vec: &'a mut Vec<T, A>,
- }
-
- impl<'a, T, A: core::alloc::Allocator> Drop for FillGapOnDrop<'a, T, A> {
- fn drop(&mut self) {
- /* This code gets executed when `same_bucket` panics */
-
- /* SAFETY: invariant guarantees that `read - write`
- * and `len - read` never overflow and that the copy is always
- * in-bounds. */
- unsafe {
- let ptr = self.vec.as_mut_ptr();
- let len = self.vec.len();
-
- /* How many items were left when `same_bucket` panicked.
- * Basically vec[read..].len() */
- let items_left = len.wrapping_sub(self.read);
-
- /* Pointer to first item in vec[write..write+items_left] slice */
- let dropped_ptr = ptr.add(self.write);
- /* Pointer to first item in vec[read..] slice */
- let valid_ptr = ptr.add(self.read);
-
- /* Copy `vec[read..]` to `vec[write..write+items_left]`.
- * The slices can overlap, so `copy_nonoverlapping` cannot be used */
- ptr::copy(valid_ptr, dropped_ptr, items_left);
-
- /* How many items have been already dropped
- * Basically vec[read..write].len() */
- let dropped = self.read.wrapping_sub(self.write);
-
- self.vec.set_len(len - dropped);
- }
- }
- }
-
- /* Drop items while going through Vec, it should be more efficient than
- * doing slice partition_dedup + truncate */
-
- // Construct gap first and then drop item to avoid memory corruption if `T::drop` panics.
- let mut gap =
- FillGapOnDrop { read: first_duplicate_idx + 1, write: first_duplicate_idx, vec: self };
- unsafe {
- // SAFETY: we checked above that `first_duplicate_idx` is in bounds.
- // If the drop panics, `gap` will remove this item from the vector without dropping it again.
- ptr::drop_in_place(start.add(first_duplicate_idx));
- }
-
- /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr
- * are always in-bounds and read_ptr never aliases prev_ptr */
- unsafe {
- while gap.read < len {
- let read_ptr = start.add(gap.read);
- let prev_ptr = start.add(gap.write.wrapping_sub(1));
-
- // We explicitly say in docs that references are reversed.
- let found_duplicate = same_bucket(&mut *read_ptr, &mut *prev_ptr);
- if found_duplicate {
- // Increase `gap.read` now since the drop may panic.
- gap.read += 1;
- /* We have found duplicate, drop it in-place */
- ptr::drop_in_place(read_ptr);
- } else {
- let write_ptr = start.add(gap.write);
-
- /* read_ptr cannot be equal to write_ptr because at this point
- * we are guaranteed to have skipped at least one element (before the loop starts).
- */
- ptr::copy_nonoverlapping(read_ptr, write_ptr, 1);
-
- /* We have filled that place, so go further */
- gap.write += 1;
- gap.read += 1;
- }
- }
-
- /* Technically we could let `gap` clean up with its Drop, but
- * when `same_bucket` is guaranteed not to panic, this bloats the
- * codegen a little, so we just do it manually */
- gap.vec.set_len(gap.write);
- mem::forget(gap);
- }
- }
-
- /// Appends an element to the back of a collection.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2];
- /// vec.push(3);
- /// assert_eq!(vec, [1, 2, 3]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn push(&mut self, value: T) {
- // This will panic or abort if we would allocate > isize::MAX bytes
- // or if the length increment would overflow for zero-sized types.
- if self.len == self.buf.capacity() {
- self.buf.reserve_for_push(self.len);
- }
- unsafe {
- let end = self.as_mut_ptr().add(self.len);
- ptr::write(end, value);
- self.len += 1;
- }
- }
-
- /// Tries to append an element to the back of a collection.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2];
- /// vec.try_push(3).unwrap();
- /// assert_eq!(vec, [1, 2, 3]);
- /// ```
- #[inline]
- #[stable(feature = "kernel", since = "1.0.0")]
- pub fn try_push(&mut self, value: T) -> Result<(), TryReserveError> {
- if self.len == self.buf.capacity() {
- self.buf.try_reserve_for_push(self.len)?;
- }
- unsafe {
- let end = self.as_mut_ptr().add(self.len);
- ptr::write(end, value);
- self.len += 1;
- }
- Ok(())
- }
-
- /// Appends an element if there is sufficient spare capacity, otherwise an error is returned
- /// with the element.
- ///
- /// Unlike [`push`] this method will not reallocate when there's insufficient capacity.
- /// The caller should use [`reserve`] or [`try_reserve`] to ensure that there is enough capacity.
- ///
- /// [`push`]: Vec::push
- /// [`reserve`]: Vec::reserve
- /// [`try_reserve`]: Vec::try_reserve
- ///
- /// # Examples
- ///
- /// A manual, panic-free alternative to [`FromIterator`]:
- ///
- /// ```
- /// #![feature(vec_push_within_capacity)]
- ///
- /// use std::collections::TryReserveError;
- /// fn from_iter_fallible<T>(iter: impl Iterator<Item=T>) -> Result<Vec<T>, TryReserveError> {
- /// let mut vec = Vec::new();
- /// for value in iter {
- /// if let Err(value) = vec.push_within_capacity(value) {
- /// vec.try_reserve(1)?;
- /// // this cannot fail, the previous line either returned or added at least 1 free slot
- /// let _ = vec.push_within_capacity(value);
- /// }
- /// }
- /// Ok(vec)
- /// }
- /// assert_eq!(from_iter_fallible(0..100), Ok(Vec::from_iter(0..100)));
- /// ```
- #[inline]
- #[unstable(feature = "vec_push_within_capacity", issue = "100486")]
- pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> {
- if self.len == self.buf.capacity() {
- return Err(value);
- }
- unsafe {
- let end = self.as_mut_ptr().add(self.len);
- ptr::write(end, value);
- self.len += 1;
- }
- Ok(())
- }
-
- /// Removes the last element from a vector and returns it, or [`None`] if it
- /// is empty.
- ///
- /// If you'd like to pop the first element, consider using
- /// [`VecDeque::pop_front`] instead.
- ///
- /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// assert_eq!(vec.pop(), Some(3));
- /// assert_eq!(vec, [1, 2]);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn pop(&mut self) -> Option<T> {
- if self.len == 0 {
- None
- } else {
- unsafe {
- self.len -= 1;
- core::intrinsics::assume(self.len < self.capacity());
- Some(ptr::read(self.as_ptr().add(self.len())))
- }
- }
- }
-
- /// Moves all the elements of `other` into `self`, leaving `other` empty.
- ///
- /// # Panics
- ///
- /// Panics if the new capacity exceeds `isize::MAX` bytes.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// let mut vec2 = vec![4, 5, 6];
- /// vec.append(&mut vec2);
- /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
- /// assert_eq!(vec2, []);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[stable(feature = "append", since = "1.4.0")]
- pub fn append(&mut self, other: &mut Self) {
- unsafe {
- self.append_elements(other.as_slice() as _);
- other.set_len(0);
- }
- }
-
- /// Appends elements to `self` from another buffer.
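- ///
- /// # Safety
- ///
- /// `other` must point to a valid slice of initialized `T`s that does not alias
- /// `self`'s buffer. The elements are bitwise-copied into `self`, so the caller
- /// must treat them as moved (neither dropped nor reused) unless `T: Copy`.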
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- unsafe fn append_elements(&mut self, other: *const [T]) {
- let count = unsafe { (*other).len() };
- self.reserve(count);
- let len = self.len();
- unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
- self.len += count;
- }
-
- /// Tries to append elements to `self` from another buffer.
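- ///
- /// # Safety
- ///
- /// Same requirements as `append_elements` above: `other` must point to a valid,
- /// non-aliasing slice of initialized `T`s whose elements the caller treats as moved.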
- #[inline]
- unsafe fn try_append_elements(&mut self, other: *const [T]) -> Result<(), TryReserveError> {
- let count = unsafe { (*other).len() };
- self.try_reserve(count)?;
- let len = self.len();
- unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
- self.len += count;
- Ok(())
- }
-
- /// Removes the specified range from the vector in bulk, returning all
- /// removed elements as an iterator. If the iterator is dropped before
- /// being fully consumed, it drops the remaining removed elements.
- ///
- /// The returned iterator keeps a mutable borrow on the vector to optimize
- /// its implementation.
- ///
- /// # Panics
- ///
- /// Panics if the starting point is greater than the end point or if
- /// the end point is greater than the length of the vector.
- ///
- /// # Leaking
- ///
- /// If the returned iterator goes out of scope without being dropped (due to
- /// [`mem::forget`], for example), the vector may have lost and leaked
- /// elements arbitrarily, including elements outside the range.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = vec![1, 2, 3];
- /// let u: Vec<_> = v.drain(1..).collect();
- /// assert_eq!(v, &[1]);
- /// assert_eq!(u, &[2, 3]);
- ///
- /// // A full range clears the vector, like `clear()` does
- /// v.drain(..);
- /// assert_eq!(v, &[]);
- /// ```
- #[stable(feature = "drain", since = "1.6.0")]
- pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, A>
- where
- R: RangeBounds<usize>,
- {
- // Memory safety
- //
- // When the Drain is first created, it shortens the length of
- // the source vector to make sure no uninitialized or moved-from elements
- // are accessible at all if the Drain's destructor never gets to run.
- //
- // Drain will ptr::read out the values to remove.
- // When finished, the remaining tail of the vec is copied back to cover
- // the hole, and the vector length is restored to the new length.
- //
- let len = self.len();
- let Range { start, end } = slice::range(range, ..len);
-
- unsafe {
- // set `self`'s length to `start`, to be safe in case the Drain is leaked
- self.set_len(start);
- let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);
- Drain {
- tail_start: end,
- tail_len: len - end,
- iter: range_slice.iter(),
- vec: NonNull::from(self),
- }
- }
- }
-
- /// Clears the vector, removing all values.
- ///
- /// Note that this method has no effect on the allocated capacity
- /// of the vector.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = vec![1, 2, 3];
- ///
- /// v.clear();
- ///
- /// assert!(v.is_empty());
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn clear(&mut self) {
- let elems: *mut [T] = self.as_mut_slice();
-
- // SAFETY:
- // - `elems` comes directly from `as_mut_slice` and is therefore valid.
- // - Setting `self.len` before calling `drop_in_place` means that,
- // if an element's `Drop` impl panics, the vector's `Drop` impl will
- // do nothing (leaking the rest of the elements) instead of dropping
- // some twice.
- unsafe {
- self.len = 0;
- ptr::drop_in_place(elems);
- }
- }
-
- /// Returns the number of elements in the vector, also referred to
- /// as its 'length'.
- ///
- /// # Examples
- ///
- /// ```
- /// let a = vec![1, 2, 3];
- /// assert_eq!(a.len(), 3);
- /// ```
- #[inline]
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn len(&self) -> usize {
- self.len
- }
-
- /// Returns `true` if the vector contains no elements.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = Vec::new();
- /// assert!(v.is_empty());
- ///
- /// v.push(1);
- /// assert!(!v.is_empty());
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-
- /// Splits the collection into two at the given index.
- ///
- /// Returns a newly allocated vector containing the elements in the range
- /// `[at, len)`. After the call, the original vector will be left containing
- /// the elements `[0, at)` with its previous capacity unchanged.
- ///
- /// # Panics
- ///
- /// Panics if `at > len`.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// let vec2 = vec.split_off(1);
- /// assert_eq!(vec, [1]);
- /// assert_eq!(vec2, [2, 3]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[must_use = "use `.truncate()` if you don't need the other half"]
- #[stable(feature = "split_off", since = "1.4.0")]
- pub fn split_off(&mut self, at: usize) -> Self
- where
- A: Clone,
- {
- #[cold]
- #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
- #[track_caller]
- fn assert_failed(at: usize, len: usize) -> ! {
- panic!("`at` split index (is {at}) should be <= len (is {len})");
- }
-
- if at > self.len() {
- assert_failed(at, self.len());
- }
-
- if at == 0 {
- // the new vector can take over the original buffer and avoid the copy
- return mem::replace(
- self,
- Vec::with_capacity_in(self.capacity(), self.allocator().clone()),
- );
- }
-
- let other_len = self.len - at;
- let mut other = Vec::with_capacity_in(other_len, self.allocator().clone());
-
- // Unsafely `set_len` and copy items to `other`.
- unsafe {
- self.set_len(at);
- other.set_len(other_len);
-
- ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
- }
- other
- }
-
- /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
- ///
- /// If `new_len` is greater than `len`, the `Vec` is extended by the
- /// difference, with each additional slot filled with the result of
- /// calling the closure `f`. The return values from `f` will end up
- /// in the `Vec` in the order they have been generated.
- ///
- /// If `new_len` is less than `len`, the `Vec` is simply truncated.
- ///
- /// This method uses a closure to create new values on every push. If
- /// you'd rather [`Clone`] a given value, use [`Vec::resize`]. If you
- /// want to use the [`Default`] trait to generate values, you can
- /// pass [`Default::default`] as the second argument.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 3];
- /// vec.resize_with(5, Default::default);
- /// assert_eq!(vec, [1, 2, 3, 0, 0]);
- ///
- /// let mut vec = vec![];
- /// let mut p = 1;
- /// vec.resize_with(4, || { p *= 2; p });
- /// assert_eq!(vec, [2, 4, 8, 16]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "vec_resize_with", since = "1.33.0")]
- pub fn resize_with<F>(&mut self, new_len: usize, f: F)
- where
- F: FnMut() -> T,
- {
- let len = self.len();
- if new_len > len {
- self.extend_trusted(iter::repeat_with(f).take(new_len - len));
- } else {
- self.truncate(new_len);
- }
- }
-
- /// Consumes and leaks the `Vec`, returning a mutable reference to the contents,
- /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime
- /// `'a`. If the type has only static references, or none at all, then this
- /// may be chosen to be `'static`.
- ///
- /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`,
- /// so the leaked allocation may include unused capacity that is not part
- /// of the returned slice.
- ///
- /// This function is mainly useful for data that lives for the remainder of
- /// the program's life. Dropping the returned reference will cause a memory
- /// leak.
- ///
- /// # Examples
- ///
- /// Simple usage:
- ///
- /// ```
- /// let x = vec![1, 2, 3];
- /// let static_ref: &'static mut [usize] = x.leak();
- /// static_ref[0] += 1;
- /// assert_eq!(static_ref, &[2, 2, 3]);
- /// ```
- #[stable(feature = "vec_leak", since = "1.47.0")]
- #[inline]
- pub fn leak<'a>(self) -> &'a mut [T]
- where
- A: 'a,
- {
- let mut me = ManuallyDrop::new(self);
- unsafe { slice::from_raw_parts_mut(me.as_mut_ptr(), me.len) }
- }
-
- /// Returns the remaining spare capacity of the vector as a slice of
- /// `MaybeUninit<T>`.
- ///
- /// The returned slice can be used to fill the vector with data (e.g. by
- /// reading from a file) before marking the data as initialized using the
- /// [`set_len`] method.
- ///
- /// [`set_len`]: Vec::set_len
- ///
- /// # Examples
- ///
- /// ```
- /// // Allocate vector big enough for 10 elements.
- /// let mut v = Vec::with_capacity(10);
- ///
- /// // Fill in the first 3 elements.
- /// let uninit = v.spare_capacity_mut();
- /// uninit[0].write(0);
- /// uninit[1].write(1);
- /// uninit[2].write(2);
- ///
- /// // Mark the first 3 elements of the vector as being initialized.
- /// unsafe {
- /// v.set_len(3);
- /// }
- ///
- /// assert_eq!(&v, &[0, 1, 2]);
- /// ```
- #[stable(feature = "vec_spare_capacity", since = "1.60.0")]
- #[inline]
- pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
- // Note:
- // This method is not implemented in terms of `split_at_spare_mut`,
- // to prevent invalidation of pointers to the buffer.
- unsafe {
- slice::from_raw_parts_mut(
- self.as_mut_ptr().add(self.len) as *mut MaybeUninit<T>,
- self.buf.capacity() - self.len,
- )
- }
- }
-
- /// Returns vector content as a slice of `T`, along with the remaining spare
- /// capacity of the vector as a slice of `MaybeUninit<T>`.
- ///
- /// The returned spare capacity slice can be used to fill the vector with data
- /// (e.g. by reading from a file) before marking the data as initialized using
- /// the [`set_len`] method.
- ///
- /// [`set_len`]: Vec::set_len
- ///
- /// Note that this is a low-level API, which should be used with care for
- /// optimization purposes. If you need to append data to a `Vec`
- /// you can use [`push`], [`extend`], [`extend_from_slice`],
- /// [`extend_from_within`], [`insert`], [`append`], [`resize`] or
- /// [`resize_with`], depending on your exact needs.
- ///
- /// [`push`]: Vec::push
- /// [`extend`]: Vec::extend
- /// [`extend_from_slice`]: Vec::extend_from_slice
- /// [`extend_from_within`]: Vec::extend_from_within
- /// [`insert`]: Vec::insert
- /// [`append`]: Vec::append
- /// [`resize`]: Vec::resize
- /// [`resize_with`]: Vec::resize_with
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(vec_split_at_spare)]
- ///
- /// let mut v = vec![1, 1, 2];
- ///
- /// // Reserve additional space big enough for 10 elements.
- /// v.reserve(10);
- ///
- /// let (init, uninit) = v.split_at_spare_mut();
- /// let sum = init.iter().copied().sum::<u32>();
- ///
- /// // Fill in the next 4 elements.
- /// uninit[0].write(sum);
- /// uninit[1].write(sum * 2);
- /// uninit[2].write(sum * 3);
- /// uninit[3].write(sum * 4);
- ///
- /// // Mark the 4 elements of the vector as being initialized.
- /// unsafe {
- /// let len = v.len();
- /// v.set_len(len + 4);
- /// }
- ///
- /// assert_eq!(&v, &[1, 1, 2, 4, 8, 12, 16]);
- /// ```
- #[unstable(feature = "vec_split_at_spare", issue = "81944")]
- #[inline]
- pub fn split_at_spare_mut(&mut self) -> (&mut [T], &mut [MaybeUninit<T>]) {
- // SAFETY:
- // - len is ignored and so never changed
- let (init, spare, _) = unsafe { self.split_at_spare_mut_with_len() };
- (init, spare)
- }
-
- /// Safety: changing the returned `.2` (the `&mut usize`) is considered the same as
- /// calling `.set_len(_)`.
- ///
- /// This method provides unique access to all of the vec's parts at once; it is used
- /// by `extend_from_within`.
- unsafe fn split_at_spare_mut_with_len(
- &mut self,
- ) -> (&mut [T], &mut [MaybeUninit<T>], &mut usize) {
- let ptr = self.as_mut_ptr();
- // SAFETY:
- // - `ptr` is guaranteed to be valid for `self.len` elements
- // - but the allocation extends out to `self.buf.capacity()` elements, possibly
- // uninitialized
- let spare_ptr = unsafe { ptr.add(self.len) };
- let spare_ptr = spare_ptr.cast::<MaybeUninit<T>>();
- let spare_len = self.buf.capacity() - self.len;
-
- // SAFETY:
- // - `ptr` is guaranteed to be valid for `self.len` elements
- // - `spare_ptr` points just past the initialized part of the buffer, so it doesn't overlap with `initialized`
- unsafe {
- let initialized = slice::from_raw_parts_mut(ptr, self.len);
- let spare = slice::from_raw_parts_mut(spare_ptr, spare_len);
-
- (initialized, spare, &mut self.len)
- }
- }
-}
-
-impl<T: Clone, A: Allocator> Vec<T, A> {
- /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
- ///
- /// If `new_len` is greater than `len`, the `Vec` is extended by the
- /// difference, with each additional slot filled with `value`.
- /// If `new_len` is less than `len`, the `Vec` is simply truncated.
- ///
- /// This method requires `T` to implement [`Clone`],
- /// in order to be able to clone the passed value.
- /// If you need more flexibility (or want to rely on [`Default`] instead of
- /// [`Clone`]), use [`Vec::resize_with`].
- /// If you only need to resize to a smaller size, use [`Vec::truncate`].
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec!["hello"];
- /// vec.resize(3, "world");
- /// assert_eq!(vec, ["hello", "world", "world"]);
- ///
- /// let mut vec = vec![1, 2, 3, 4];
- /// vec.resize(2, 0);
- /// assert_eq!(vec, [1, 2]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "vec_resize", since = "1.5.0")]
- pub fn resize(&mut self, new_len: usize, value: T) {
- let len = self.len();
-
- if new_len > len {
- self.extend_with(new_len - len, value)
- } else {
- self.truncate(new_len);
- }
- }
-
- /// Tries to resize the `Vec` in-place so that `len` is equal to `new_len`.
- ///
- /// If `new_len` is greater than `len`, the `Vec` is extended by the
- /// difference, with each additional slot filled with `value`.
- /// If `new_len` is less than `len`, the `Vec` is simply truncated.
- ///
- /// This method requires `T` to implement [`Clone`],
- /// in order to be able to clone the passed value.
- /// If you need more flexibility (or want to rely on [`Default`] instead of
- /// [`Clone`]), use [`Vec::resize_with`].
- /// If you only need to resize to a smaller size, use [`Vec::truncate`].
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec!["hello"];
- /// vec.try_resize(3, "world").unwrap();
- /// assert_eq!(vec, ["hello", "world", "world"]);
- ///
- /// let mut vec = vec![1, 2, 3, 4];
- /// vec.try_resize(2, 0).unwrap();
- /// assert_eq!(vec, [1, 2]);
- ///
- /// let mut vec = vec![42];
- /// let result = vec.try_resize(usize::MAX, 0);
- /// assert!(result.is_err());
- /// ```
- #[stable(feature = "kernel", since = "1.0.0")]
- pub fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError> {
- let len = self.len();
-
- if new_len > len {
- self.try_extend_with(new_len - len, value)
- } else {
- self.truncate(new_len);
- Ok(())
- }
- }
-
- /// Clones and appends all elements in a slice to the `Vec`.
- ///
- /// Iterates over the slice `other`, clones each element, and then appends
- /// it to this `Vec`. The `other` slice is traversed in-order.
- ///
- /// Note that this function is the same as [`extend`], except that it is
- /// specialized to work with slices instead. If and when Rust gets
- /// specialization this function will likely be deprecated (but still
- /// available).
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1];
- /// vec.extend_from_slice(&[2, 3, 4]);
- /// assert_eq!(vec, [1, 2, 3, 4]);
- /// ```
- ///
- /// [`extend`]: Vec::extend
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
- pub fn extend_from_slice(&mut self, other: &[T]) {
- self.spec_extend(other.iter())
- }
-
- /// Tries to clone and append all elements in a slice to the `Vec`.
- ///
- /// Iterates over the slice `other`, clones each element, and then appends
- /// it to this `Vec`. The `other` slice is traversed in-order.
- ///
- /// Note that this function is the same as [`extend`], except that it is
- /// specialized to work with slices instead. If and when Rust gets
- /// specialization this function will likely be deprecated (but still
- /// available).
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1];
- /// vec.try_extend_from_slice(&[2, 3, 4]).unwrap();
- /// assert_eq!(vec, [1, 2, 3, 4]);
- /// ```
- ///
- /// [`extend`]: Vec::extend
- #[stable(feature = "kernel", since = "1.0.0")]
- pub fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), TryReserveError> {
- self.try_spec_extend(other.iter())
- }
-
- /// Copies elements from `src` range to the end of the vector.
- ///
- /// # Panics
- ///
- /// Panics if the starting point is greater than the end point or if
- /// the end point is greater than the length of the vector.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![0, 1, 2, 3, 4];
- ///
- /// vec.extend_from_within(2..);
- /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4]);
- ///
- /// vec.extend_from_within(..2);
- /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1]);
- ///
- /// vec.extend_from_within(4..8);
- /// assert_eq!(vec, [0, 1, 2, 3, 4, 2, 3, 4, 0, 1, 4, 2, 3, 4]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[stable(feature = "vec_extend_from_within", since = "1.53.0")]
- pub fn extend_from_within<R>(&mut self, src: R)
- where
- R: RangeBounds<usize>,
- {
- let range = slice::range(src, ..self.len());
- self.reserve(range.len());
-
- // SAFETY:
- // - `slice::range` guarantees that the given range is valid for indexing self
- unsafe {
- self.spec_extend_from_within(range);
- }
- }
-}
-
-impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
- /// Takes a `Vec<[T; N]>` and flattens it into a `Vec<T>`.
- ///
- /// # Panics
- ///
- /// Panics if the length of the resulting vector would overflow a `usize`.
- ///
- /// This is only possible when flattening a vector of arrays of zero-sized
- /// types, and thus tends to be irrelevant in practice. If
- /// `size_of::<T>() > 0`, this will never panic.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(slice_flatten)]
- ///
- /// let mut vec = vec![[1, 2, 3], [4, 5, 6], [7, 8, 9]];
- /// assert_eq!(vec.pop(), Some([7, 8, 9]));
- ///
- /// let mut flattened = vec.into_flattened();
- /// assert_eq!(flattened.pop(), Some(6));
- /// ```
- #[unstable(feature = "slice_flatten", issue = "95629")]
- pub fn into_flattened(self) -> Vec<T, A> {
- let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
- let (new_len, new_cap) = if T::IS_ZST {
- (len.checked_mul(N).expect("vec len overflow"), usize::MAX)
- } else {
- // SAFETY:
- // - `cap * N` cannot overflow because the allocation is already in
- // the address space.
- // - Each `[T; N]` has `N` valid elements, so there are `len * N`
- // valid elements in the allocation.
- unsafe { (len.unchecked_mul(N), cap.unchecked_mul(N)) }
- };
- // SAFETY:
- // - `ptr` was allocated by `self`
- // - `ptr` is well-aligned because `[T; N]` has the same alignment as `T`.
- // - `new_cap` refers to the same sized allocation as `cap` because
- // `new_cap * size_of::<T>()` == `cap * size_of::<[T; N]>()`
- // - `len` <= `cap`, so `len * N` <= `cap * N`.
- unsafe { Vec::<T, A>::from_raw_parts_in(ptr.cast(), new_len, new_cap, alloc) }
- }
-}
-
-impl<T: Clone, A: Allocator> Vec<T, A> {
- #[cfg(not(no_global_oom_handling))]
- /// Extend the vector by `n` clones of value.
- fn extend_with(&mut self, n: usize, value: T) {
- self.reserve(n);
-
- unsafe {
- let mut ptr = self.as_mut_ptr().add(self.len());
- // Use SetLenOnDrop to work around a compiler bug: the compiler
- // might not realize that the stores through `ptr` and through
- // `self.set_len()` don't alias.
- let mut local_len = SetLenOnDrop::new(&mut self.len);
-
- // Write all elements except the last one
- for _ in 1..n {
- ptr::write(ptr, value.clone());
- ptr = ptr.add(1);
- // Increment the length in every step in case clone() panics
- local_len.increment_len(1);
- }
-
- if n > 0 {
- // We can write the last element directly without cloning needlessly
- ptr::write(ptr, value);
- local_len.increment_len(1);
- }
-
- // len set by scope guard
- }
- }
-
- /// Try to extend the vector by `n` clones of value.
- fn try_extend_with(&mut self, n: usize, value: T) -> Result<(), TryReserveError> {
- self.try_reserve(n)?;
-
- unsafe {
- let mut ptr = self.as_mut_ptr().add(self.len());
- // Use SetLenOnDrop to work around a compiler bug: the compiler
- // might not realize that the stores through `ptr` and through
- // `self.set_len()` don't alias.
- let mut local_len = SetLenOnDrop::new(&mut self.len);
-
- // Write all elements except the last one
- for _ in 1..n {
- ptr::write(ptr, value.clone());
- ptr = ptr.add(1);
- // Increment the length in every step in case clone() panics
- local_len.increment_len(1);
- }
-
- if n > 0 {
- // We can write the last element directly without cloning needlessly
- ptr::write(ptr, value);
- local_len.increment_len(1);
- }
-
- // len set by scope guard
- Ok(())
- }
- }
-}
-
-impl<T: PartialEq, A: Allocator> Vec<T, A> {
- /// Removes consecutive repeated elements in the vector according to the
- /// [`PartialEq`] trait implementation.
- ///
- /// If the vector is sorted, this removes all duplicates.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1, 2, 2, 3, 2];
- ///
- /// vec.dedup();
- ///
- /// assert_eq!(vec, [1, 2, 3, 2]);
- /// ```
- #[stable(feature = "rust1", since = "1.0.0")]
- #[inline]
- pub fn dedup(&mut self) {
- self.dedup_by(|a, b| a == b)
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// Internal methods and functions
-////////////////////////////////////////////////////////////////////////////////
-
-#[doc(hidden)]
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
- <T as SpecFromElem>::from_elem(elem, n, Global)
-}
-
-#[doc(hidden)]
-#[cfg(not(no_global_oom_handling))]
-#[unstable(feature = "allocator_api", issue = "32838")]
-pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
- <T as SpecFromElem>::from_elem(elem, n, alloc)
-}
-
-#[cfg(not(no_global_oom_handling))]
-trait ExtendFromWithinSpec {
- /// # Safety
- ///
- /// - `src` needs to be a valid index
- /// - `self.capacity() - self.len()` must be `>= src.len()`
- unsafe fn spec_extend_from_within(&mut self, src: Range<usize>);
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone, A: Allocator> ExtendFromWithinSpec for Vec<T, A> {
- default unsafe fn spec_extend_from_within(&mut self, src: Range<usize>) {
- // SAFETY:
- // - len is increased only after initializing elements
- let (this, spare, len) = unsafe { self.split_at_spare_mut_with_len() };
-
- // SAFETY:
- // - caller guarantees that src is a valid index
- let to_clone = unsafe { this.get_unchecked(src) };
-
- iter::zip(to_clone, spare)
- .map(|(src, dst)| dst.write(src.clone()))
- // Note:
- // - Element was just initialized with `MaybeUninit::write`, so it's ok to increase len
- // - len is increased after each element to prevent leaks (see issue #82533)
- .for_each(|_| *len += 1);
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy, A: Allocator> ExtendFromWithinSpec for Vec<T, A> {
- unsafe fn spec_extend_from_within(&mut self, src: Range<usize>) {
- let count = src.len();
- {
- let (init, spare) = self.split_at_spare_mut();
-
- // SAFETY:
- // - caller guarantees that `src` is a valid index
- let source = unsafe { init.get_unchecked(src) };
-
- // SAFETY:
- // - Both pointers are created from unique slice references (`&mut [_]`)
- // so they are valid and do not overlap.
-            // - Elements are `Copy`, so it's OK to copy them without doing
-            //   anything with the original values
- // - `count` is equal to the len of `source`, so source is valid for
- // `count` reads
- // - `.reserve(count)` guarantees that `spare.len() >= count` so spare
- // is valid for `count` writes
- unsafe { ptr::copy_nonoverlapping(source.as_ptr(), spare.as_mut_ptr() as _, count) };
- }
-
- // SAFETY:
- // - The elements were just initialized by `copy_nonoverlapping`
- self.len += count;
- }
-}
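
Both specializations above sit behind the stable `Vec::extend_from_within`, which validates the range and reserves spare capacity before delegating to `spec_extend_from_within`. A doctest-style usage sketch:

```
let mut v = vec![0, 1, 2, 3, 4];
// Clone (or memcpy, for `Copy` types) the tail back onto the end of the vector.
v.extend_from_within(2..);
assert_eq!(v, [0, 1, 2, 3, 4, 2, 3, 4]);
```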
-
-////////////////////////////////////////////////////////////////////////////////
-// Common trait implementations for Vec
-////////////////////////////////////////////////////////////////////////////////
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> ops::Deref for Vec<T, A> {
- type Target = [T];
-
- #[inline]
- fn deref(&self) -> &[T] {
- unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> ops::DerefMut for Vec<T, A> {
- #[inline]
- fn deref_mut(&mut self) -> &mut [T] {
- unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
- #[cfg(not(test))]
- fn clone(&self) -> Self {
- let alloc = self.allocator().clone();
- <[T]>::to_vec_in(&**self, alloc)
- }
-
- // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
- // required for this method definition, is not available. Instead use the
- // `slice::to_vec` function which is only available with cfg(test)
- // NB see the slice::hack module in slice.rs for more information
- #[cfg(test)]
- fn clone(&self) -> Self {
- let alloc = self.allocator().clone();
- crate::slice::to_vec(&**self, alloc)
- }
-
- fn clone_from(&mut self, other: &Self) {
- crate::slice::SpecCloneIntoVec::clone_into(other.as_slice(), self);
- }
-}
-
-/// The hash of a vector is the same as that of the corresponding slice,
-/// as required by the `core::borrow::Borrow` implementation.
-///
-/// ```
-/// use std::hash::BuildHasher;
-///
-/// let b = std::hash::RandomState::new();
-/// let v: Vec<u8> = vec![0xa8, 0x3c, 0x09];
-/// let s: &[u8] = &[0xa8, 0x3c, 0x09];
-/// assert_eq!(b.hash_one(v), b.hash_one(s));
-/// ```
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Hash, A: Allocator> Hash for Vec<T, A> {
- #[inline]
- fn hash<H: Hasher>(&self, state: &mut H) {
- Hash::hash(&**self, state)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented(
- message = "vector indices are of type `usize` or ranges of `usize`",
- label = "vector indices are of type `usize` or ranges of `usize`"
-)]
-impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> {
- type Output = I::Output;
-
- #[inline]
- fn index(&self, index: I) -> &Self::Output {
- Index::index(&**self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented(
- message = "vector indices are of type `usize` or ranges of `usize`",
- label = "vector indices are of type `usize` or ranges of `usize`"
-)]
-impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
- #[inline]
- fn index_mut(&mut self, index: I) -> &mut Self::Output {
- IndexMut::index_mut(&mut **self, index)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> FromIterator<T> for Vec<T> {
- #[inline]
- fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
- <Self as SpecFromIter<T, I::IntoIter>>::from_iter(iter.into_iter())
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> IntoIterator for Vec<T, A> {
- type Item = T;
- type IntoIter = IntoIter<T, A>;
-
- /// Creates a consuming iterator, that is, one that moves each value out of
- /// the vector (from start to end). The vector cannot be used after calling
- /// this.
- ///
- /// # Examples
- ///
- /// ```
- /// let v = vec!["a".to_string(), "b".to_string()];
- /// let mut v_iter = v.into_iter();
- ///
- /// let first_element: Option<String> = v_iter.next();
- ///
- /// assert_eq!(first_element, Some("a".to_string()));
- /// assert_eq!(v_iter.next(), Some("b".to_string()));
- /// assert_eq!(v_iter.next(), None);
- /// ```
- #[inline]
- fn into_iter(self) -> Self::IntoIter {
- unsafe {
- let mut me = ManuallyDrop::new(self);
- let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
- let begin = me.as_mut_ptr();
- let end = if T::IS_ZST {
- begin.wrapping_byte_add(me.len())
- } else {
- begin.add(me.len()) as *const T
- };
- let cap = me.buf.capacity();
- IntoIter {
- buf: NonNull::new_unchecked(begin),
- phantom: PhantomData,
- cap,
- alloc,
- ptr: begin,
- end,
- }
- }
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T, A: Allocator> IntoIterator for &'a Vec<T, A> {
- type Item = &'a T;
- type IntoIter = slice::Iter<'a, T>;
-
- fn into_iter(self) -> Self::IntoIter {
- self.iter()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
- type Item = &'a mut T;
- type IntoIter = slice::IterMut<'a, T>;
-
- fn into_iter(self) -> Self::IntoIter {
- self.iter_mut()
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> Extend<T> for Vec<T, A> {
- #[inline]
- fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
- <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
- }
-
- #[inline]
- fn extend_one(&mut self, item: T) {
- self.push(item);
- }
-
- #[inline]
- fn extend_reserve(&mut self, additional: usize) {
- self.reserve(additional);
- }
-}
-
-impl<T, A: Allocator> Vec<T, A> {
- // leaf method to which various SpecFrom/SpecExtend implementations delegate when
- // they have no further optimizations to apply
- #[cfg(not(no_global_oom_handling))]
- fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
- // This is the case for a general iterator.
- //
- // This function should be the moral equivalent of:
- //
- // for item in iterator {
- // self.push(item);
- // }
- while let Some(element) = iterator.next() {
- let len = self.len();
- if len == self.capacity() {
- let (lower, _) = iterator.size_hint();
- self.reserve(lower.saturating_add(1));
- }
- unsafe {
- ptr::write(self.as_mut_ptr().add(len), element);
- // Since next() executes user code which can panic we have to bump the length
- // after each step.
- // NB can't overflow since we would have had to alloc the address space
- self.set_len(len + 1);
- }
- }
- }
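
For reference, a simplified safe sketch of the "moral equivalent" loop described in the comment: it reserves once up front using the iterator's lower size hint and then pushes element by element, whereas the unsafe version above re-checks capacity inside the loop and writes through a raw pointer (`extend_like` is an illustrative name, not part of the API):

```
fn extend_like<T>(vec: &mut Vec<T>, iter: impl IntoIterator<Item = T>) {
    let iter = iter.into_iter();
    // Reserve based on the iterator's lower size hint, as the unsafe version does.
    let (lower, _) = iter.size_hint();
    vec.reserve(lower);
    for item in iter {
        vec.push(item);
    }
}

let mut v = vec![1, 2];
extend_like(&mut v, 3..=5);
assert_eq!(v, [1, 2, 3, 4, 5]);
```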
-
- // leaf method to which various SpecFrom/SpecExtend implementations delegate when
- // they have no further optimizations to apply
- fn try_extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) -> Result<(), TryReserveError> {
- // This is the case for a general iterator.
- //
- // This function should be the moral equivalent of:
- //
- // for item in iterator {
- // self.push(item);
- // }
- while let Some(element) = iterator.next() {
- let len = self.len();
- if len == self.capacity() {
- let (lower, _) = iterator.size_hint();
- self.try_reserve(lower.saturating_add(1))?;
- }
- unsafe {
- ptr::write(self.as_mut_ptr().add(len), element);
- // Since next() executes user code which can panic we have to bump the length
- // after each step.
- // NB can't overflow since we would have had to alloc the address space
- self.set_len(len + 1);
- }
- }
-
- Ok(())
- }
-
- // specific extend for `TrustedLen` iterators, called both by the specializations
- // and internal places where resolving specialization makes compilation slower
- #[cfg(not(no_global_oom_handling))]
- fn extend_trusted(&mut self, iterator: impl iter::TrustedLen<Item = T>) {
- let (low, high) = iterator.size_hint();
- if let Some(additional) = high {
- debug_assert_eq!(
- low,
- additional,
- "TrustedLen iterator's size hint is not exact: {:?}",
- (low, high)
- );
- self.reserve(additional);
- unsafe {
- let ptr = self.as_mut_ptr();
- let mut local_len = SetLenOnDrop::new(&mut self.len);
- iterator.for_each(move |element| {
- ptr::write(ptr.add(local_len.current_len()), element);
- // Since the loop executes user code which can panic we have to update
- // the length every step to correctly drop what we've written.
- // NB can't overflow since we would have had to alloc the address space
- local_len.increment_len(1);
- });
- }
- } else {
- // Per TrustedLen contract a `None` upper bound means that the iterator length
- // truly exceeds usize::MAX, which would eventually lead to a capacity overflow anyway.
- // Since the other branch already panics eagerly (via `reserve()`) we do the same here.
- // This avoids additional codegen for a fallback code path which would eventually
- // panic anyway.
- panic!("capacity overflow");
- }
- }
-
- // specific extend for `TrustedLen` iterators, called both by the specializations
- // and internal places where resolving specialization makes compilation slower
- fn try_extend_trusted(&mut self, iterator: impl iter::TrustedLen<Item = T>) -> Result<(), TryReserveError> {
- let (low, high) = iterator.size_hint();
- if let Some(additional) = high {
- debug_assert_eq!(
- low,
- additional,
- "TrustedLen iterator's size hint is not exact: {:?}",
- (low, high)
- );
- self.try_reserve(additional)?;
- unsafe {
- let ptr = self.as_mut_ptr();
- let mut local_len = SetLenOnDrop::new(&mut self.len);
- iterator.for_each(move |element| {
- ptr::write(ptr.add(local_len.current_len()), element);
- // Since the loop executes user code which can panic we have to update
- // the length every step to correctly drop what we've written.
- // NB can't overflow since we would have had to alloc the address space
- local_len.increment_len(1);
- });
- }
- Ok(())
- } else {
- Err(TryReserveErrorKind::CapacityOverflow.into())
- }
- }
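
The `debug_assert_eq!` in both methods leans on the `TrustedLen` contract that the size hint is exact. Stable iterators such as ranges and slice iterators already report hints of that shape, which is easy to check in a doctest-style snippet (whether a given iterator is `TrustedLen` remains an internal, unstable detail):

```
let r = 0..5;
assert_eq!(r.size_hint(), (5, Some(5))); // exact: lower bound equals upper bound

let s = [1, 2, 3];
assert_eq!(s.iter().size_hint(), (3, Some(3)));
```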
-
- /// Creates a splicing iterator that replaces the specified range in the vector
- /// with the given `replace_with` iterator and yields the removed items.
- /// `replace_with` does not need to be the same length as `range`.
- ///
- /// `range` is removed even if the iterator is not consumed until the end.
- ///
- /// It is unspecified how many elements are removed from the vector
- /// if the `Splice` value is leaked.
- ///
- /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped.
- ///
- /// This is optimal if:
- ///
-    /// * The tail (elements in the vector after `range`) is empty,
-    /// * or `replace_with` yields no more elements than `range`’s length,
-    /// * or the lower bound of its `size_hint()` is exact.
- ///
- /// Otherwise, a temporary vector is allocated and the tail is moved twice.
- ///
- /// # Panics
- ///
- /// Panics if the starting point is greater than the end point or if
- /// the end point is greater than the length of the vector.
- ///
- /// # Examples
- ///
- /// ```
- /// let mut v = vec![1, 2, 3, 4];
- /// let new = [7, 8, 9];
- /// let u: Vec<_> = v.splice(1..3, new).collect();
- /// assert_eq!(v, &[1, 7, 8, 9, 4]);
- /// assert_eq!(u, &[2, 3]);
- /// ```
- #[cfg(not(no_global_oom_handling))]
- #[inline]
- #[stable(feature = "vec_splice", since = "1.21.0")]
- pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A>
- where
- R: RangeBounds<usize>,
- I: IntoIterator<Item = T>,
- {
- Splice { drain: self.drain(range), replace_with: replace_with.into_iter() }
- }
-
- /// Creates an iterator which uses a closure to determine if an element should be removed.
- ///
- /// If the closure returns true, then the element is removed and yielded.
- /// If the closure returns false, the element will remain in the vector and will not be yielded
- /// by the iterator.
- ///
- /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
- /// or the iteration short-circuits, then the remaining elements will be retained.
- /// Use [`retain`] with a negated predicate if you do not need the returned iterator.
- ///
- /// [`retain`]: Vec::retain
- ///
- /// Using this method is equivalent to the following code:
- ///
- /// ```
- /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
- /// # let mut vec = vec![1, 2, 3, 4, 5, 6];
- /// let mut i = 0;
- /// while i < vec.len() {
- /// if some_predicate(&mut vec[i]) {
- /// let val = vec.remove(i);
- /// // your code here
- /// } else {
- /// i += 1;
- /// }
- /// }
- ///
- /// # assert_eq!(vec, vec![1, 4, 5]);
- /// ```
- ///
- /// But `extract_if` is easier to use. `extract_if` is also more efficient,
- /// because it can backshift the elements of the array in bulk.
- ///
- /// Note that `extract_if` also lets you mutate every element in the filter closure,
- /// regardless of whether you choose to keep or remove it.
- ///
- /// # Examples
- ///
- /// Splitting an array into evens and odds, reusing the original allocation:
- ///
- /// ```
- /// #![feature(extract_if)]
- /// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
- ///
- /// let evens = numbers.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
- /// let odds = numbers;
- ///
- /// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
- /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
- /// ```
- #[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
- pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, F, A>
- where
- F: FnMut(&mut T) -> bool,
- {
- let old_len = self.len();
-
- // Guard against us getting leaked (leak amplification)
- unsafe {
- self.set_len(0);
- }
-
- ExtractIf { vec: self, idx: 0, del: 0, old_len, pred: filter }
- }
-}
-
-/// Extend implementation that copies elements out of references before pushing them onto the Vec.
-///
-/// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to
-/// append the entire slice at once.
-///
-/// [`copy_from_slice`]: slice::copy_from_slice
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for Vec<T, A> {
- fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
- self.spec_extend(iter.into_iter())
- }
-
- #[inline]
- fn extend_one(&mut self, &item: &'a T) {
- self.push(item);
- }
-
- #[inline]
- fn extend_reserve(&mut self, additional: usize) {
- self.reserve(additional);
- }
-}
-
-/// Implements comparison of vectors, [lexicographically](Ord#lexicographical-comparison).
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A1, A2> PartialOrd<Vec<T, A2>> for Vec<T, A1>
-where
- T: PartialOrd,
- A1: Allocator,
- A2: Allocator,
-{
- #[inline]
- fn partial_cmp(&self, other: &Vec<T, A2>) -> Option<Ordering> {
- PartialOrd::partial_cmp(&**self, &**other)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}
-
-/// Implements ordering of vectors, [lexicographically](Ord#lexicographical-comparison).
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
- #[inline]
- fn cmp(&self, other: &Self) -> Ordering {
- Ord::cmp(&**self, &**other)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec<T, A> {
- fn drop(&mut self) {
- unsafe {
- // use drop for [T]
-            // use a raw slice to refer to the elements of the vector as the weakest necessary type;
-            // this could avoid questions of validity in certain cases
- ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
- }
- // RawVec handles deallocation
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Default for Vec<T> {
- /// Creates an empty `Vec<T>`.
- ///
- /// The vector will not allocate until elements are pushed onto it.
- fn default() -> Vec<T> {
- Vec::new()
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&**self, f)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
- fn as_ref(&self) -> &Vec<T, A> {
- self
- }
-}
-
-#[stable(feature = "vec_as_mut", since = "1.5.0")]
-impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
- fn as_mut(&mut self) -> &mut Vec<T, A> {
- self
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
- fn as_ref(&self) -> &[T] {
- self
- }
-}
-
-#[stable(feature = "vec_as_mut", since = "1.5.0")]
-impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
- fn as_mut(&mut self) -> &mut [T] {
- self
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> From<&[T]> for Vec<T> {
- /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Vec::from(&[1, 2, 3][..]), vec![1, 2, 3]);
- /// ```
- #[cfg(not(test))]
- fn from(s: &[T]) -> Vec<T> {
- s.to_vec()
- }
- #[cfg(test)]
- fn from(s: &[T]) -> Vec<T> {
- crate::slice::to_vec(s, Global)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "vec_from_mut", since = "1.19.0")]
-impl<T: Clone> From<&mut [T]> for Vec<T> {
- /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Vec::from(&mut [1, 2, 3][..]), vec![1, 2, 3]);
- /// ```
- #[cfg(not(test))]
- fn from(s: &mut [T]) -> Vec<T> {
- s.to_vec()
- }
- #[cfg(test)]
- fn from(s: &mut [T]) -> Vec<T> {
- crate::slice::to_vec(s, Global)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "vec_from_array_ref", since = "1.74.0")]
-impl<T: Clone, const N: usize> From<&[T; N]> for Vec<T> {
- /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Vec::from(&[1, 2, 3]), vec![1, 2, 3]);
- /// ```
- fn from(s: &[T; N]) -> Vec<T> {
- Self::from(s.as_slice())
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "vec_from_array_ref", since = "1.74.0")]
-impl<T: Clone, const N: usize> From<&mut [T; N]> for Vec<T> {
- /// Allocate a `Vec<T>` and fill it by cloning `s`'s items.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Vec::from(&mut [1, 2, 3]), vec![1, 2, 3]);
- /// ```
- fn from(s: &mut [T; N]) -> Vec<T> {
- Self::from(s.as_mut_slice())
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "vec_from_array", since = "1.44.0")]
-impl<T, const N: usize> From<[T; N]> for Vec<T> {
- /// Allocate a `Vec<T>` and move `s`'s items into it.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]);
- /// ```
- #[cfg(not(test))]
- fn from(s: [T; N]) -> Vec<T> {
- <[T]>::into_vec(Box::new(s))
- }
-
- #[cfg(test)]
- fn from(s: [T; N]) -> Vec<T> {
- crate::slice::into_vec(Box::new(s))
- }
-}
-
-#[cfg(not(no_borrow))]
-#[stable(feature = "vec_from_cow_slice", since = "1.14.0")]
-impl<'a, T> From<Cow<'a, [T]>> for Vec<T>
-where
- [T]: ToOwned<Owned = Vec<T>>,
-{
- /// Convert a clone-on-write slice into a vector.
- ///
- /// If `s` already owns a `Vec<T>`, it will be returned directly.
- /// If `s` is borrowing a slice, a new `Vec<T>` will be allocated and
- /// filled by cloning `s`'s items into it.
- ///
- /// # Examples
- ///
- /// ```
- /// # use std::borrow::Cow;
- /// let o: Cow<'_, [i32]> = Cow::Owned(vec![1, 2, 3]);
- /// let b: Cow<'_, [i32]> = Cow::Borrowed(&[1, 2, 3]);
- /// assert_eq!(Vec::from(o), Vec::from(b));
- /// ```
- fn from(s: Cow<'a, [T]>) -> Vec<T> {
- s.into_owned()
- }
-}
-
-// note: test pulls in std, which causes errors here
-#[cfg(not(test))]
-#[stable(feature = "vec_from_box", since = "1.18.0")]
-impl<T, A: Allocator> From<Box<[T], A>> for Vec<T, A> {
- /// Convert a boxed slice into a vector by transferring ownership of
- /// the existing heap allocation.
- ///
- /// # Examples
- ///
- /// ```
- /// let b: Box<[i32]> = vec![1, 2, 3].into_boxed_slice();
- /// assert_eq!(Vec::from(b), vec![1, 2, 3]);
- /// ```
- fn from(s: Box<[T], A>) -> Self {
- s.into_vec()
- }
-}
-
-// note: test pulls in std, which causes errors here
-#[cfg(not(no_global_oom_handling))]
-#[cfg(not(test))]
-#[stable(feature = "box_from_vec", since = "1.20.0")]
-impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> {
- /// Convert a vector into a boxed slice.
- ///
- /// If `v` has excess capacity, its items will be moved into a
- /// newly-allocated buffer with exactly the right capacity.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice());
- /// ```
- ///
- /// Any excess capacity is removed:
- /// ```
- /// let mut vec = Vec::with_capacity(10);
- /// vec.extend([1, 2, 3]);
- ///
- /// assert_eq!(Box::from(vec), vec![1, 2, 3].into_boxed_slice());
- /// ```
- fn from(v: Vec<T, A>) -> Self {
- v.into_boxed_slice()
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-#[stable(feature = "rust1", since = "1.0.0")]
-impl From<&str> for Vec<u8> {
- /// Allocate a `Vec<u8>` and fill it with a UTF-8 string.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(Vec::from("123"), vec![b'1', b'2', b'3']);
- /// ```
- fn from(s: &str) -> Vec<u8> {
- From::from(s.as_bytes())
- }
-}
-
-#[stable(feature = "array_try_from_vec", since = "1.48.0")]
-impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
- type Error = Vec<T, A>;
-
- /// Gets the entire contents of the `Vec<T>` as an array,
- /// if its size exactly matches that of the requested array.
- ///
- /// # Examples
- ///
- /// ```
- /// assert_eq!(vec![1, 2, 3].try_into(), Ok([1, 2, 3]));
- /// assert_eq!(<Vec<i32>>::new().try_into(), Ok([]));
- /// ```
- ///
- /// If the length doesn't match, the input comes back in `Err`:
- /// ```
- /// let r: Result<[i32; 4], _> = (0..10).collect::<Vec<_>>().try_into();
- /// assert_eq!(r, Err(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]));
- /// ```
- ///
- /// If you're fine with just getting a prefix of the `Vec<T>`,
- /// you can call [`.truncate(N)`](Vec::truncate) first.
- /// ```
- /// let mut v = String::from("hello world").into_bytes();
- /// v.sort();
- /// v.truncate(2);
- /// let [a, b]: [_; 2] = v.try_into().unwrap();
- /// assert_eq!(a, b' ');
- /// assert_eq!(b, b'd');
- /// ```
- fn try_from(mut vec: Vec<T, A>) -> Result<[T; N], Vec<T, A>> {
- if vec.len() != N {
- return Err(vec);
- }
-
- // SAFETY: `.set_len(0)` is always sound.
- unsafe { vec.set_len(0) };
-
- // SAFETY: A `Vec`'s pointer is always aligned properly, and
- // the alignment the array needs is the same as the items.
- // We checked earlier that we have sufficient items.
- // The items will not double-drop as the `set_len`
- // tells the `Vec` not to also drop them.
- let array = unsafe { ptr::read(vec.as_ptr() as *const [T; N]) };
- Ok(array)
- }
-}
diff --git a/rust/alloc/vec/partial_eq.rs b/rust/alloc/vec/partial_eq.rs
deleted file mode 100644
index 10ad4e4922..0000000000
--- a/rust/alloc/vec/partial_eq.rs
+++ /dev/null
@@ -1,49 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-use crate::alloc::Allocator;
-#[cfg(not(no_global_oom_handling))]
-use crate::borrow::Cow;
-
-use super::Vec;
-
-macro_rules! __impl_slice_eq1 {
- ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => {
- #[$stability]
- impl<T, U, $($vars)*> PartialEq<$rhs> for $lhs
- where
- T: PartialEq<U>,
- $($ty: $bound)?
- {
- #[inline]
- fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
- #[inline]
- fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
- }
- }
-}
-
-__impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec<T, A1>, Vec<U, A2>, #[stable(feature = "rust1", since = "1.0.0")] }
-__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] }
-__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] }
-__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
-__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
-__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
-__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
-#[cfg(not(no_global_oom_handling))]
-__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
-#[cfg(not(no_global_oom_handling))]
-__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
-#[cfg(not(no_global_oom_handling))]
-__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
-__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] }
-__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] }
-
-// NOTE: some less important impls are omitted to reduce code bloat
-// FIXME(Centril): Reconsider this?
-//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], }
-//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, }
-//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, }
-//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, }
-//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], }
-//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], }
-//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], }
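
The macro above generates the mixed-type comparisons that make assertions like the following compile; each line exercises a different generated impl (doctest-style):

```
let v = vec![1, 2, 3];
assert_eq!(v, [1, 2, 3]);        // Vec<T, A> == [U; N]
assert_eq!(v, &[1, 2, 3][..]);   // Vec<T, A> == &[U]
assert_eq!(&[1, 2, 3][..], v);   // &[T] == Vec<U, A>
assert_eq!(v, vec![1, 2, 3]);    // Vec<T, A1> == Vec<U, A2>
```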
diff --git a/rust/alloc/vec/set_len_on_drop.rs b/rust/alloc/vec/set_len_on_drop.rs
deleted file mode 100644
index d3c7297b80..0000000000
--- a/rust/alloc/vec/set_len_on_drop.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
-//
-// The idea is: the length is kept in a local field of SetLenOnDrop, which the
-// optimizer can see does not alias with any stores through the Vec's data
-// pointer. This is a workaround for alias analysis issue #32155.
-pub(super) struct SetLenOnDrop<'a> {
- len: &'a mut usize,
- local_len: usize,
-}
-
-impl<'a> SetLenOnDrop<'a> {
- #[inline]
- pub(super) fn new(len: &'a mut usize) -> Self {
- SetLenOnDrop { local_len: *len, len }
- }
-
- #[inline]
- pub(super) fn increment_len(&mut self, increment: usize) {
- self.local_len += increment;
- }
-
- #[inline]
- pub(super) fn current_len(&self) -> usize {
- self.local_len
- }
-}
-
-impl Drop for SetLenOnDrop<'_> {
- #[inline]
- fn drop(&mut self) {
- *self.len = self.local_len;
- }
-}
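
As a standalone illustration of the scope-guard pattern `SetLenOnDrop` implements, here is a doctest-style sketch using a plain counter instead of a `Vec` length field (`CommitOnDrop` is an invented name for the example): progress is tracked in a local copy and committed when the guard is dropped, so the target stays consistent even if the loop body panics.

```
struct CommitOnDrop<'a> {
    target: &'a mut usize,
    local: usize,
}

impl Drop for CommitOnDrop<'_> {
    fn drop(&mut self) {
        // Commit whatever progress was recorded, even during unwinding.
        *self.target = self.local;
    }
}

let mut done = 0;
{
    let mut guard = CommitOnDrop { local: done, target: &mut done };
    for _ in 0..3 {
        // ... fallible work would go here ...
        guard.local += 1;
    }
} // the guard drops here and writes the final count back
assert_eq!(done, 3);
```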
diff --git a/rust/alloc/vec/spec_extend.rs b/rust/alloc/vec/spec_extend.rs
deleted file mode 100644
index ada9195374..0000000000
--- a/rust/alloc/vec/spec_extend.rs
+++ /dev/null
@@ -1,119 +0,0 @@
-// SPDX-License-Identifier: Apache-2.0 OR MIT
-
-use crate::alloc::Allocator;
-use crate::collections::TryReserveError;
-use core::iter::TrustedLen;
-use core::slice::{self};
-
-use super::{IntoIter, Vec};
-
-// Specialization trait used for Vec::extend
-#[cfg(not(no_global_oom_handling))]
-pub(super) trait SpecExtend<T, I> {
- fn spec_extend(&mut self, iter: I);
-}
-
-// Specialization trait used for Vec::try_extend
-pub(super) trait TrySpecExtend<T, I> {
- fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError>;
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
-where
- I: Iterator<Item = T>,
-{
- default fn spec_extend(&mut self, iter: I) {
- self.extend_desugared(iter)
- }
-}
-
-impl<T, I, A: Allocator> TrySpecExtend<T, I> for Vec<T, A>
-where
- I: Iterator<Item = T>,
-{
- default fn try_spec_extend(&mut self, iter: I) -> Result<(), TryReserveError> {
- self.try_extend_desugared(iter)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
-where
- I: TrustedLen<Item = T>,
-{
- default fn spec_extend(&mut self, iterator: I) {
- self.extend_trusted(iterator)
- }
-}
-
-impl<T, I, A: Allocator> TrySpecExtend<T, I> for Vec<T, A>
-where
- I: TrustedLen<Item = T>,
-{
- default fn try_spec_extend(&mut self, iterator: I) -> Result<(), TryReserveError> {
- self.try_extend_trusted(iterator)
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
- fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
- unsafe {
- self.append_elements(iterator.as_slice() as _);
- }
- iterator.forget_remaining_elements();
- }
-}
-
-impl<T, A: Allocator> TrySpecExtend<T, IntoIter<T>> for Vec<T, A> {
- fn try_spec_extend(&mut self, mut iterator: IntoIter<T>) -> Result<(), TryReserveError> {
- unsafe {
- self.try_append_elements(iterator.as_slice() as _)?;
- }
- iterator.forget_remaining_elements();
- Ok(())
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for Vec<T, A>
-where
- I: Iterator<Item = &'a T>,
- T: Clone,
-{
- default fn spec_extend(&mut self, iterator: I) {
- self.spec_extend(iterator.cloned())
- }
-}
-
-impl<'a, T: 'a, I, A: Allocator> TrySpecExtend<&'a T, I> for Vec<T, A>
-where
- I: Iterator<Item = &'a T>,
- T: Clone,
-{
- default fn try_spec_extend(&mut self, iterator: I) -> Result<(), TryReserveError> {
- self.try_spec_extend(iterator.cloned())
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
-where
- T: Copy,
-{
- fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
- let slice = iterator.as_slice();
- unsafe { self.append_elements(slice) };
- }
-}
-
-impl<'a, T: 'a, A: Allocator> TrySpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
-where
- T: Copy,
-{
- fn try_spec_extend(&mut self, iterator: slice::Iter<'a, T>) -> Result<(), TryReserveError> {
- let slice = iterator.as_slice();
- unsafe { self.try_append_elements(slice) }
- }
-}
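
The `slice::Iter` specializations above are a performance detail rather than a behavioural one: extending from a slice iterator and calling `extend_from_slice` produce the same vector, as this doctest-style check shows (whether the bulk-copy fast path is actually taken is internal):

```
let src = [1u32, 2, 3];

let mut a = Vec::new();
a.extend(src.iter()); // dispatches through `Extend<&T>` / `spec_extend`
let mut b = Vec::new();
b.extend_from_slice(&src); // explicit slice append

assert_eq!(a, b);
assert_eq!(a, [1, 2, 3]);
```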