path: root/library/alloc
Diffstat (limited to 'library/alloc')
-rw-r--r--  library/alloc/benches/lib.rs                     |   1
-rw-r--r--  library/alloc/src/alloc.rs                       |  12
-rw-r--r--  library/alloc/src/borrow.rs                      |   1
-rw-r--r--  library/alloc/src/boxed.rs                       |  68
-rw-r--r--  library/alloc/src/boxed/thin.rs                  |   2
-rw-r--r--  library/alloc/src/collections/btree/map.rs       |  41
-rw-r--r--  library/alloc/src/collections/btree/map/entry.rs |   1
-rw-r--r--  library/alloc/src/collections/btree/node.rs      |   6
-rw-r--r--  library/alloc/src/collections/btree/set.rs       |  26
-rw-r--r--  library/alloc/src/collections/linked_list.rs     |   2
-rw-r--r--  library/alloc/src/collections/mod.rs             |   1
-rw-r--r--  library/alloc/src/collections/vec_deque/mod.rs   |  14
-rw-r--r--  library/alloc/src/ffi/c_str.rs                   |   3
-rw-r--r--  library/alloc/src/fmt.rs                         |   2
-rw-r--r--  library/alloc/src/lib.rs                         |  22
-rw-r--r--  library/alloc/src/raw_vec.rs                     |  12
-rw-r--r--  library/alloc/src/rc.rs                          |  12
-rw-r--r--  library/alloc/src/slice.rs                       |   8
-rw-r--r--  library/alloc/src/string.rs                      |  34
-rw-r--r--  library/alloc/src/sync.rs                        |  61
-rw-r--r--  library/alloc/src/task.rs                        |   6
-rw-r--r--  library/alloc/src/vec/drain.rs                   |   4
-rw-r--r--  library/alloc/src/vec/in_place_collect.rs        |  18
-rw-r--r--  library/alloc/src/vec/in_place_drop.rs           |  15
-rw-r--r--  library/alloc/src/vec/into_iter.rs               |  27
-rw-r--r--  library/alloc/src/vec/is_zero.rs                 |  22
-rw-r--r--  library/alloc/src/vec/mod.rs                     | 144
-rw-r--r--  library/alloc/tests/autotraits.rs                | 293
-rw-r--r--  library/alloc/tests/lib.rs                       |   7
-rw-r--r--  library/alloc/tests/vec.rs                       |  65
30 files changed, 733 insertions, 197 deletions
diff --git a/library/alloc/benches/lib.rs b/library/alloc/benches/lib.rs
index 72ac897d4..d418965cd 100644
--- a/library/alloc/benches/lib.rs
+++ b/library/alloc/benches/lib.rs
@@ -3,7 +3,6 @@
#![cfg(not(target_os = "android"))]
#![feature(btree_drain_filter)]
#![feature(iter_next_chunk)]
-#![feature(map_first_last)]
#![feature(repr_simd)]
#![feature(slice_partition_dedup)]
#![feature(test)]
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 80b067812..8187517cc 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -28,16 +28,20 @@ extern "Rust" {
// The rustc fork of LLVM 14 and earlier also special-cases these function names to be able to optimize them
// like `malloc`, `realloc`, and `free`, respectively.
#[rustc_allocator]
- #[rustc_allocator_nounwind]
+ #[cfg_attr(not(bootstrap), rustc_nounwind)]
+ #[cfg_attr(bootstrap, rustc_allocator_nounwind)]
fn __rust_alloc(size: usize, align: usize) -> *mut u8;
#[rustc_deallocator]
- #[rustc_allocator_nounwind]
+ #[cfg_attr(not(bootstrap), rustc_nounwind)]
+ #[cfg_attr(bootstrap, rustc_allocator_nounwind)]
fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
#[rustc_reallocator]
- #[rustc_allocator_nounwind]
+ #[cfg_attr(not(bootstrap), rustc_nounwind)]
+ #[cfg_attr(bootstrap, rustc_allocator_nounwind)]
fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
#[rustc_allocator_zeroed]
- #[rustc_allocator_nounwind]
+ #[cfg_attr(not(bootstrap), rustc_nounwind)]
+ #[cfg_attr(bootstrap, rustc_allocator_nounwind)]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}
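The paired `cfg_attr`s above select the new `rustc_nounwind` attribute when building with the in-tree compiler and fall back to the old `rustc_allocator_nounwind` spelling under the bootstrap (previous-release) compiler. Since the `rustc_*` attributes are compiler-internal, here is a minimal sketch of the same switching idiom using ordinary lint attributes; the attribute names here are purely illustrative:

```rust
// Illustration of the cfg_attr(bootstrap, ...) idiom. `bootstrap` is a cfg
// the Rust build system sets while compiling the standard library with the
// previous release's compiler.
#[cfg_attr(not(bootstrap), deny(unused_variables))] // attribute for the new compiler
#[cfg_attr(bootstrap, allow(unused_variables))]     // fallback for the older one
fn example() {
    let _unused = 0;
}
```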
diff --git a/library/alloc/src/borrow.rs b/library/alloc/src/borrow.rs
index 904a53bb4..83a138559 100644
--- a/library/alloc/src/borrow.rs
+++ b/library/alloc/src/borrow.rs
@@ -21,7 +21,6 @@ use Cow::*;
impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
where
B: ToOwned,
- <B as ToOwned>::Owned: 'a,
{
fn borrow(&self) -> &B {
&**self
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 65e323c9e..d6681a317 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -151,7 +151,6 @@ use core::async_iter::AsyncIterator;
use core::borrow;
use core::cmp::Ordering;
use core::convert::{From, TryFrom};
-#[cfg(not(bootstrap))]
use core::error::Error;
use core::fmt;
use core::future::Future;
@@ -176,7 +175,6 @@ use crate::borrow::Cow;
use crate::raw_vec::RawVec;
#[cfg(not(no_global_oom_handling))]
use crate::str::from_boxed_utf8_unchecked;
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
@@ -1622,6 +1620,22 @@ impl<T, const N: usize> From<[T; N]> for Box<[T]> {
}
}
+/// Casts a boxed slice to a boxed array.
+///
+/// # Safety
+///
+/// `boxed_slice.len()` must be exactly `N`.
+unsafe fn boxed_slice_as_array_unchecked<T, A: Allocator, const N: usize>(
+ boxed_slice: Box<[T], A>,
+) -> Box<[T; N], A> {
+ debug_assert_eq!(boxed_slice.len(), N);
+
+ let (ptr, alloc) = Box::into_raw_with_allocator(boxed_slice);
+ // SAFETY: Pointer and allocator came from an existing box,
+ // and our safety condition requires that the length is exactly `N`
+ unsafe { Box::from_raw_in(ptr as *mut [T; N], alloc) }
+}
+
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
type Error = Box<[T]>;
@@ -1637,13 +1651,46 @@ impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
/// `boxed_slice.len()` does not equal `N`.
fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
if boxed_slice.len() == N {
- Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) })
+ Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) })
} else {
Err(boxed_slice)
}
}
}
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_array_try_from_vec", since = "1.66.0")]
+impl<T, const N: usize> TryFrom<Vec<T>> for Box<[T; N]> {
+ type Error = Vec<T>;
+
+ /// Attempts to convert a `Vec<T>` into a `Box<[T; N]>`.
+ ///
+ /// Like [`Vec::into_boxed_slice`], this is in-place if `vec.capacity() == N`,
+ /// but will require a reallocation otherwise.
+ ///
+ /// # Errors
+ ///
+ /// Returns the original `Vec<T>` in the `Err` variant if
+ /// `boxed_slice.len()` does not equal `N`.
+ ///
+ /// # Examples
+ ///
+ /// This can be used with [`vec!`] to create an array on the heap:
+ ///
+ /// ```
+ /// let state: Box<[f32; 100]> = vec![1.0; 100].try_into().unwrap();
+ /// assert_eq!(state.len(), 100);
+ /// ```
+ fn try_from(vec: Vec<T>) -> Result<Self, Self::Error> {
+ if vec.len() == N {
+ let boxed_slice = vec.into_boxed_slice();
+ Ok(unsafe { boxed_slice_as_array_unchecked(boxed_slice) })
+ } else {
+ Err(vec)
+ }
+ }
+}
+
impl<A: Allocator> Box<dyn Any, A> {
/// Attempt to downcast the box to a concrete type.
///
@@ -2037,8 +2084,7 @@ impl<T: ?Sized, A: Allocator> AsMut<T> for Box<T, A> {
* could have a method to project a Pin<T> from it.
*/
#[stable(feature = "pin", since = "1.33.0")]
-#[rustc_const_unstable(feature = "const_box", issue = "92521")]
-impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> where A: 'static {}
+impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
@@ -2091,7 +2137,6 @@ impl<S: ?Sized + AsyncIterator + Unpin> AsyncIterator for Box<S> {
}
}
-#[cfg(not(bootstrap))]
impl dyn Error {
#[inline]
#[stable(feature = "error_downcast", since = "1.3.0")]
@@ -2109,7 +2154,6 @@ impl dyn Error {
}
}
-#[cfg(not(bootstrap))]
impl dyn Error + Send {
#[inline]
#[stable(feature = "error_downcast", since = "1.3.0")]
@@ -2124,7 +2168,6 @@ impl dyn Error + Send {
}
}
-#[cfg(not(bootstrap))]
impl dyn Error + Send + Sync {
#[inline]
#[stable(feature = "error_downcast", since = "1.3.0")]
@@ -2139,7 +2182,6 @@ impl dyn Error + Send + Sync {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, E: Error + 'a> From<E> for Box<dyn Error + 'a> {
@@ -2173,7 +2215,6 @@ impl<'a, E: Error + 'a> From<E> for Box<dyn Error + 'a> {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, E: Error + Send + Sync + 'a> From<E> for Box<dyn Error + Send + Sync + 'a> {
@@ -2213,7 +2254,6 @@ impl<'a, E: Error + Send + Sync + 'a> From<E> for Box<dyn Error + Send + Sync +
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl From<String> for Box<dyn Error + Send + Sync> {
@@ -2258,7 +2298,6 @@ impl From<String> for Box<dyn Error + Send + Sync> {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "string_box_error", since = "1.6.0")]
impl From<String> for Box<dyn Error> {
@@ -2281,7 +2320,6 @@ impl From<String> for Box<dyn Error> {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&str> for Box<dyn Error + Send + Sync + 'a> {
@@ -2306,7 +2344,6 @@ impl<'a> From<&str> for Box<dyn Error + Send + Sync + 'a> {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "string_box_error", since = "1.6.0")]
impl From<&str> for Box<dyn Error> {
@@ -2329,7 +2366,6 @@ impl From<&str> for Box<dyn Error> {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "cow_box_error", since = "1.22.0")]
impl<'a, 'b> From<Cow<'b, str>> for Box<dyn Error + Send + Sync + 'a> {
@@ -2352,7 +2388,6 @@ impl<'a, 'b> From<Cow<'b, str>> for Box<dyn Error + Send + Sync + 'a> {
}
}
-#[cfg(not(bootstrap))]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "cow_box_error", since = "1.22.0")]
impl<'a> From<Cow<'a, str>> for Box<dyn Error> {
@@ -2374,7 +2409,6 @@ impl<'a> From<Cow<'a, str>> for Box<dyn Error> {
}
}
-#[cfg(not(bootstrap))]
#[stable(feature = "box_error", since = "1.8.0")]
impl<T: core::error::Error> core::error::Error for Box<T> {
#[allow(deprecated, deprecated_in_future)]
diff --git a/library/alloc/src/boxed/thin.rs b/library/alloc/src/boxed/thin.rs
index 0a20c74b0..c477c4490 100644
--- a/library/alloc/src/boxed/thin.rs
+++ b/library/alloc/src/boxed/thin.rs
@@ -2,7 +2,6 @@
// https://github.com/matthieu-m/rfc2580/blob/b58d1d3cba0d4b5e859d3617ea2d0943aaa31329/examples/thin.rs
// by matthieu-m
use crate::alloc::{self, Layout, LayoutError};
-#[cfg(not(bootstrap))]
use core::error::Error;
use core::fmt::{self, Debug, Display, Formatter};
use core::marker::PhantomData;
@@ -274,7 +273,6 @@ impl<H> WithHeader<H> {
}
}
-#[cfg(not(bootstrap))]
#[unstable(feature = "thin_box", issue = "92791")]
impl<T: ?Sized + Error> Error for ThinBox<T> {
fn source(&self) -> Option<&(dyn Error + 'static)> {
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index cacbd54b6..8a7719347 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -580,7 +580,7 @@ impl<K, V> BTreeMap<K, V> {
/// map.insert(1, "a");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[rustc_const_stable(feature = "const_btree_new", since = "1.66.0")]
#[must_use]
pub const fn new() -> BTreeMap<K, V> {
BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(Global), _marker: PhantomData }
@@ -703,7 +703,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Basic usage:
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@@ -712,7 +711,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// map.insert(2, "a");
/// assert_eq!(map.first_key_value(), Some((&1, &"b")));
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn first_key_value(&self) -> Option<(&K, &V)>
where
K: Ord,
@@ -727,7 +726,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// # Examples
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@@ -741,7 +739,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// assert_eq!(*map.get(&1).unwrap(), "first");
/// assert_eq!(*map.get(&2).unwrap(), "b");
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V, A>>
where
K: Ord,
@@ -765,7 +763,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Draining elements in ascending order, while keeping a usable map each iteration.
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@@ -776,7 +773,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// }
/// assert!(map.is_empty());
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn pop_first(&mut self) -> Option<(K, V)>
where
K: Ord,
@@ -792,7 +789,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Basic usage:
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@@ -800,7 +796,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// map.insert(2, "a");
/// assert_eq!(map.last_key_value(), Some((&2, &"a")));
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn last_key_value(&self) -> Option<(&K, &V)>
where
K: Ord,
@@ -815,7 +811,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// # Examples
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@@ -829,7 +824,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// assert_eq!(*map.get(&1).unwrap(), "a");
/// assert_eq!(*map.get(&2).unwrap(), "last");
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V, A>>
where
K: Ord,
@@ -853,7 +848,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Draining elements in descending order, while keeping a usable map each iteration.
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
@@ -864,7 +858,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// }
/// assert!(map.is_empty());
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn pop_last(&mut self) -> Option<(K, V)>
where
K: Ord,
@@ -1099,6 +1093,9 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Moves all elements from `other` into `self`, leaving `other` empty.
///
+ /// If a key from `other` is already present in `self`, the respective
+ /// value from `self` will be overwritten with the respective value from `other`.
+ ///
/// # Examples
///
/// ```
@@ -1107,10 +1104,10 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// let mut a = BTreeMap::new();
/// a.insert(1, "a");
/// a.insert(2, "b");
- /// a.insert(3, "c");
+ /// a.insert(3, "c"); // Note: Key (3) also present in b.
///
/// let mut b = BTreeMap::new();
- /// b.insert(3, "d");
+ /// b.insert(3, "d"); // Note: Key (3) also present in a.
/// b.insert(4, "e");
/// b.insert(5, "f");
///
@@ -1121,7 +1118,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// assert_eq!(a[&1], "a");
/// assert_eq!(a[&2], "b");
- /// assert_eq!(a[&3], "d");
+ /// assert_eq!(a[&3], "d"); // Note: "c" has been overwritten.
/// assert_eq!(a[&4], "e");
/// assert_eq!(a[&5], "f");
/// ```
@@ -2392,7 +2389,11 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[rustc_const_unstable(
+ feature = "const_btree_len",
+ issue = "71835",
+ implied_by = "const_btree_new"
+ )]
pub const fn len(&self) -> usize {
self.length
}
@@ -2413,7 +2414,11 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[rustc_const_unstable(
+ feature = "const_btree_len",
+ issue = "71835",
+ implied_by = "const_btree_new"
+ )]
pub const fn is_empty(&self) -> bool {
self.len() == 0
}
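Taken together, the hunks above stabilize the `map_first_last` API in 1.66. A short sketch of the stabilized surface, with no feature gate required:

```rust
use std::collections::BTreeMap;

let mut map = BTreeMap::from([(1, "a"), (2, "b"), (3, "c")]);
// Min/max queries and removal, now callable on stable Rust.
assert_eq!(map.first_key_value(), Some((&1, &"a")));
assert_eq!(map.last_key_value(), Some((&3, &"c")));
assert_eq!(map.pop_first(), Some((1, "a")));
assert_eq!(map.pop_last(), Some((3, "c")));
assert_eq!(map.len(), 1);
```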
diff --git a/library/alloc/src/collections/btree/map/entry.rs b/library/alloc/src/collections/btree/map/entry.rs
index cd7cdc192..370b58864 100644
--- a/library/alloc/src/collections/btree/map/entry.rs
+++ b/library/alloc/src/collections/btree/map/entry.rs
@@ -133,7 +133,6 @@ impl<'a, K: Debug + Ord, V: Debug, A: Allocator + Clone> fmt::Display
}
}
-#[cfg(not(bootstrap))]
#[unstable(feature = "map_try_insert", issue = "82766")]
impl<'a, K: core::fmt::Debug + Ord, V: core::fmt::Debug> core::error::Error
for crate::collections::btree_map::OccupiedError<'a, K, V>
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index f1d2d3b30..da766b67a 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -206,9 +206,9 @@ impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
-unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
-unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
-unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
+unsafe impl<K: Sync, V: Sync, Type> Send for NodeRef<marker::Immut<'_>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Mut<'_>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::ValMut<'_>, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index 2cfc08074..4ddb21192 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -343,7 +343,7 @@ impl<T> BTreeSet<T> {
/// let mut set: BTreeSet<i32> = BTreeSet::new();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[rustc_const_stable(feature = "const_btree_new", since = "1.66.0")]
#[must_use]
pub const fn new() -> BTreeSet<T> {
BTreeSet { map: BTreeMap::new() }
@@ -786,7 +786,6 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// Basic usage:
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeSet;
///
/// let mut set = BTreeSet::new();
@@ -797,7 +796,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// assert_eq!(set.first(), Some(&1));
/// ```
#[must_use]
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn first(&self) -> Option<&T>
where
T: Ord,
@@ -813,7 +812,6 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// Basic usage:
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeSet;
///
/// let mut set = BTreeSet::new();
@@ -824,7 +822,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// assert_eq!(set.last(), Some(&2));
/// ```
#[must_use]
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn last(&self) -> Option<&T>
where
T: Ord,
@@ -838,7 +836,6 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// # Examples
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeSet;
///
/// let mut set = BTreeSet::new();
@@ -849,7 +846,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// }
/// assert!(set.is_empty());
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn pop_first(&mut self) -> Option<T>
where
T: Ord,
@@ -863,7 +860,6 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// # Examples
///
/// ```
- /// #![feature(map_first_last)]
/// use std::collections::BTreeSet;
///
/// let mut set = BTreeSet::new();
@@ -874,7 +870,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// }
/// assert!(set.is_empty());
/// ```
- #[unstable(feature = "map_first_last", issue = "62924")]
+ #[stable(feature = "map_first_last", since = "1.66.0")]
pub fn pop_last(&mut self) -> Option<T>
where
T: Ord,
@@ -1174,7 +1170,11 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[rustc_const_unstable(
+ feature = "const_btree_len",
+ issue = "71835",
+ implied_by = "const_btree_new"
+ )]
pub const fn len(&self) -> usize {
self.map.len()
}
@@ -1193,7 +1193,11 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[rustc_const_unstable(
+ feature = "const_btree_len",
+ issue = "71835",
+ implied_by = "const_btree_new"
+ )]
pub const fn is_empty(&self) -> bool {
self.len() == 0
}
diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs
index 6480fcaf9..f2f5dffc2 100644
--- a/library/alloc/src/collections/linked_list.rs
+++ b/library/alloc/src/collections/linked_list.rs
@@ -1613,7 +1613,7 @@ impl<'a, T> CursorMut<'a, T> {
None
} else {
// We can't point to the node that we pop. Copying the behavior of
- // `remove_current`, we move on the the next node in the sequence.
+ // `remove_current`, we move on to the next node in the sequence.
// If the list is of length 1 then we end pointing to the "ghost"
// node at index 0, which is expected.
if self.list.head == self.current {
diff --git a/library/alloc/src/collections/mod.rs b/library/alloc/src/collections/mod.rs
index 21d0def08..161a37573 100644
--- a/library/alloc/src/collections/mod.rs
+++ b/library/alloc/src/collections/mod.rs
@@ -153,6 +153,5 @@ trait SpecExtend<I: IntoIterator> {
fn spec_extend(&mut self, iter: I);
}
-#[cfg(not(bootstrap))]
#[stable(feature = "try_reserve", since = "1.57.0")]
impl core::error::Error for TryReserveError {}
diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index e3f4deb08..2a57dad89 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -12,11 +12,17 @@ use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter::{repeat_with, FromIterator};
use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ops::{Index, IndexMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice;
+// This is used in a bunch of intra-doc links.
+// FIXME: For some reason, `#[cfg(doc)]` wasn't sufficient, resulting in
+// failures in linkchecker even though rustdoc built the docs just fine.
+#[allow(unused_imports)]
+use core::mem;
+
use crate::alloc::{Allocator, Global};
use crate::collections::TryReserveError;
use crate::collections::TryReserveErrorKind;
@@ -177,7 +183,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// Marginally more convenient
#[inline]
fn cap(&self) -> usize {
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// For zero sized types, we are always at maximum capacity
MAXIMUM_ZST_CAPACITY
} else {
@@ -3038,7 +3044,7 @@ impl<T, A: Allocator> From<Vec<T, A>> for VecDeque<T, A> {
/// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
fn from(mut other: Vec<T, A>) -> Self {
let len = other.len();
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// There's no actual allocation for ZSTs to worry about capacity,
// but `VecDeque` can't handle as much length as `Vec`.
assert!(len < MAXIMUM_ZST_CAPACITY, "capacity overflow");
@@ -3124,7 +3130,7 @@ impl<T, const N: usize> From<[T; N]> for VecDeque<T> {
fn from(arr: [T; N]) -> Self {
let mut deq = VecDeque::with_capacity(N);
let arr = ManuallyDrop::new(arr);
- if mem::size_of::<T>() != 0 {
+ if !<T>::IS_ZST {
// SAFETY: VecDeque::with_capacity ensures that there is enough capacity.
unsafe {
ptr::copy_nonoverlapping(arr.as_ptr(), deq.ptr(), N);
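This file, and several below, replace `mem::size_of::<T>() == 0` checks with the `T::IS_ZST` associated constant from the unstable `sized_type_properties` feature (enabled in `lib.rs` later in this diff). A minimal sketch of the equivalence, assuming a nightly compiler:

```rust
#![feature(sized_type_properties)]
use core::mem::SizedTypeProperties;

// IS_ZST is an associated constant equivalent to size_of::<T>() == 0,
// usable directly in conditions without spelling out size_of::<T>().
fn describe<T>() -> &'static str {
    if T::IS_ZST { "zero-sized" } else { "has a size" }
}

assert_eq!(describe::<()>(), "zero-sized");
assert_eq!(describe::<u8>(), "has a size");
```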
diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs
index aede6d54c..11bd4c4dc 100644
--- a/library/alloc/src/ffi/c_str.rs
+++ b/library/alloc/src/ffi/c_str.rs
@@ -1122,7 +1122,6 @@ impl CStr {
}
}
-#[cfg(not(bootstrap))]
#[stable(feature = "rust1", since = "1.0.0")]
impl core::error::Error for NulError {
#[allow(deprecated)]
@@ -1131,11 +1130,9 @@ impl core::error::Error for NulError {
}
}
-#[cfg(not(bootstrap))]
#[stable(feature = "cstring_from_vec_with_nul", since = "1.58.0")]
impl core::error::Error for FromVecWithNulError {}
-#[cfg(not(bootstrap))]
#[stable(feature = "cstring_into", since = "1.7.0")]
impl core::error::Error for IntoStringError {
#[allow(deprecated)]
diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs
index ed398b566..799ce9d5d 100644
--- a/library/alloc/src/fmt.rs
+++ b/library/alloc/src/fmt.rs
@@ -327,7 +327,7 @@
//! - `text` must not contain any `'{'` or `'}'` characters,
//! - `ws` is any character for which [`char::is_whitespace`] returns `true`, has no semantic
//! meaning and is completely optional,
-//! - `integer` is a decimal integer that may contain leading zeroes and
+//! - `integer` is a decimal integer that may contain leading zeroes and must fit into a `usize` and
//! - `identifier` is an `IDENTIFIER_OR_KEYWORD` (not an `IDENTIFIER`) as defined by the [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html).
//!
//! # Formatting traits
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 8619467c2..ce36b116f 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -69,6 +69,8 @@
any(not(feature = "miri-test-libstd"), test, doctest),
no_global_oom_handling,
not(no_global_oom_handling),
+ not(no_rc),
+ not(no_sync),
target_has_atomic = "ptr"
))]
#![no_std]
@@ -97,7 +99,7 @@
#![feature(coerce_unsized)]
#![cfg_attr(not(no_global_oom_handling), feature(const_alloc_error))]
#![feature(const_box)]
-#![cfg_attr(not(no_global_oom_handling), feature(const_btree_new))]
+#![cfg_attr(not(no_global_oom_handling), feature(const_btree_len))]
#![feature(const_cow_is_borrowed)]
#![feature(const_convert)]
#![feature(const_size_of_val)]
@@ -109,10 +111,11 @@
#![feature(core_intrinsics)]
#![feature(const_eval_select)]
#![feature(const_pin)]
+#![feature(const_waker)]
#![feature(cstr_from_bytes_until_nul)]
#![feature(dispatch_from_dyn)]
-#![cfg_attr(not(bootstrap), feature(error_generic_member_access))]
-#![cfg_attr(not(bootstrap), feature(error_in_core))]
+#![feature(error_generic_member_access)]
+#![feature(error_in_core)]
#![feature(exact_size_is_empty)]
#![feature(extend_one)]
#![feature(fmt_internals)]
@@ -122,20 +125,21 @@
#![feature(iter_advance_by)]
#![feature(iter_next_chunk)]
#![feature(layout_for_ptr)]
-#![feature(maybe_uninit_array_assume_init)]
#![feature(maybe_uninit_slice)]
#![feature(maybe_uninit_uninit_array)]
+#![feature(maybe_uninit_uninit_array_transpose)]
#![cfg_attr(test, feature(new_uninit))]
#![feature(nonnull_slice_from_raw_parts)]
#![feature(pattern)]
#![feature(pointer_byte_offsets)]
-#![cfg_attr(not(bootstrap), feature(provide_any))]
+#![feature(provide_any)]
#![feature(ptr_internals)]
#![feature(ptr_metadata)]
#![feature(ptr_sub_ptr)]
#![feature(receiver_trait)]
#![feature(saturating_int_impl)]
#![feature(set_ptr_value)]
+#![feature(sized_type_properties)]
#![feature(slice_from_ptr_range)]
#![feature(slice_group_by)]
#![feature(slice_ptr_get)]
@@ -169,7 +173,6 @@
#![cfg_attr(not(test), feature(generator_trait))]
#![feature(hashmap_internals)]
#![feature(lang_items)]
-#![cfg_attr(bootstrap, feature(let_else))]
#![feature(min_specialization)]
#![feature(negative_impls)]
#![feature(never_type)]
@@ -224,16 +227,17 @@ mod boxed {
}
pub mod borrow;
pub mod collections;
-#[cfg(not(no_global_oom_handling))]
+#[cfg(all(not(no_rc), not(no_sync), not(no_global_oom_handling)))]
pub mod ffi;
pub mod fmt;
+#[cfg(not(no_rc))]
pub mod rc;
pub mod slice;
pub mod str;
pub mod string;
-#[cfg(target_has_atomic = "ptr")]
+#[cfg(all(not(no_rc), not(no_sync), target_has_atomic = "ptr"))]
pub mod sync;
-#[cfg(all(not(no_global_oom_handling), target_has_atomic = "ptr"))]
+#[cfg(all(not(no_global_oom_handling), not(no_rc), not(no_sync), target_has_atomic = "ptr"))]
pub mod task;
#[cfg(test)]
mod tests;
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
index b0f4529ab..5a10121bb 100644
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@ -3,7 +3,7 @@
use core::alloc::LayoutError;
use core::cmp;
use core::intrinsics;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ops::Drop;
use core::ptr::{self, NonNull, Unique};
use core::slice;
@@ -168,7 +168,7 @@ impl<T, A: Allocator> RawVec<T, A> {
#[cfg(not(no_global_oom_handling))]
fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
// Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
- if mem::size_of::<T>() == 0 || capacity == 0 {
+ if T::IS_ZST || capacity == 0 {
Self::new_in(alloc)
} else {
// We avoid `unwrap_or_else` here because it bloats the amount of
@@ -229,7 +229,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// This will always be `usize::MAX` if `T` is zero-sized.
#[inline(always)]
pub fn capacity(&self) -> usize {
- if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+ if T::IS_ZST { usize::MAX } else { self.cap }
}
/// Returns a shared reference to the allocator backing this `RawVec`.
@@ -238,7 +238,7 @@ impl<T, A: Allocator> RawVec<T, A> {
}
fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
- if mem::size_of::<T>() == 0 || self.cap == 0 {
+ if T::IS_ZST || self.cap == 0 {
None
} else {
// We have an allocated chunk of memory, so we can bypass runtime
@@ -380,7 +380,7 @@ impl<T, A: Allocator> RawVec<T, A> {
// This is ensured by the calling contexts.
debug_assert!(additional > 0);
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// Since we return a capacity of `usize::MAX` when `elem_size` is
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow.into());
@@ -406,7 +406,7 @@ impl<T, A: Allocator> RawVec<T, A> {
// `grow_amortized`, but this method is usually instantiated less often so
// it's less critical.
fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// Since we return a capacity of `usize::MAX` when the type size is
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow.into());
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 6d247681c..006d813e5 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -1110,8 +1110,8 @@ impl<T: ?Sized> Rc<T> {
#[inline]
#[stable(feature = "ptr_eq", since = "1.17.0")]
- /// Returns `true` if the two `Rc`s point to the same allocation
- /// (in a vein similar to [`ptr::eq`]).
+ /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
+ /// [`ptr::eq`]. See [that function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
///
/// # Examples
///
@@ -1386,7 +1386,7 @@ impl<T: ?Sized> Rc<T> {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.allocate(layout),
- |mem| mem.with_metadata_of(ptr as *mut RcBox<T>),
+ |mem| mem.with_metadata_of(ptr as *const RcBox<T>),
)
}
}
@@ -2419,9 +2419,9 @@ impl<T: ?Sized> Weak<T> {
}
}
- /// Returns `true` if the two `Weak`s point to the same allocation (similar to
- /// [`ptr::eq`]), or if both don't point to any allocation
- /// (because they were created with `Weak::new()`).
+ /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
+ /// both don't point to any allocation (because they were created with `Weak::new()`). See [that
+ /// function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
///
/// # Notes
///
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs
index bcd3f49e2..a5e7bf2a1 100644
--- a/library/alloc/src/slice.rs
+++ b/library/alloc/src/slice.rs
@@ -16,9 +16,7 @@ use core::borrow::{Borrow, BorrowMut};
#[cfg(not(no_global_oom_handling))]
use core::cmp::Ordering::{self, Less};
#[cfg(not(no_global_oom_handling))]
-use core::mem;
-#[cfg(not(no_global_oom_handling))]
-use core::mem::size_of;
+use core::mem::{self, SizedTypeProperties};
#[cfg(not(no_global_oom_handling))]
use core::ptr;
@@ -205,7 +203,7 @@ impl<T> [T] {
where
T: Ord,
{
- merge_sort(self, |a, b| a.lt(b));
+ merge_sort(self, T::lt);
}
/// Sorts the slice with a comparator function.
@@ -1018,7 +1016,7 @@ where
const MIN_RUN: usize = 10;
// Sorting has no meaningful behavior on zero-sized types.
- if size_of::<T>() == 0 {
+ if T::IS_ZST {
return;
}
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index f2448396c..c436adf70 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -44,7 +44,6 @@
#[cfg(not(no_global_oom_handling))]
use core::char::{decode_utf16, REPLACEMENT_CHARACTER};
-#[cfg(not(bootstrap))]
use core::error::Error;
use core::fmt;
use core::hash;
@@ -68,7 +67,7 @@ use core::str::Utf8Chunks;
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
use crate::collections::TryReserveError;
-use crate::str::{self, Chars, Utf8Error};
+use crate::str::{self, from_utf8_unchecked_mut, Chars, Utf8Error};
#[cfg(not(no_global_oom_handling))]
use crate::str::{from_boxed_utf8_unchecked, FromStr};
use crate::vec::Vec;
@@ -1850,6 +1849,35 @@ impl String {
let slice = self.vec.into_boxed_slice();
unsafe { from_boxed_utf8_unchecked(slice) }
}
+
+ /// Consumes and leaks the `String`, returning a mutable reference to the contents,
+ /// `&'static mut str`.
+ ///
+ /// This is mainly useful for data that lives for the remainder of
+ /// the program's life. Dropping the returned reference will cause a memory
+ /// leak.
+ ///
+ /// It does not reallocate or shrink the `String`,
+ /// so the leaked allocation may include unused capacity that is not part
+ /// of the returned slice.
+ ///
+ /// # Examples
+ ///
+ /// Simple usage:
+ ///
+ /// ```
+ /// #![feature(string_leak)]
+ ///
+ /// let x = String::from("bucket");
+ /// let static_ref: &'static mut str = x.leak();
+ /// assert_eq!(static_ref, "bucket");
+ /// ```
+ #[unstable(feature = "string_leak", issue = "102929")]
+ #[inline]
+ pub fn leak(self) -> &'static mut str {
+ let slice = self.vec.leak();
+ unsafe { from_utf8_unchecked_mut(slice) }
+ }
}
impl FromUtf8Error {
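The unused-capacity caveat in the new `String::leak` docs is worth spelling out: the returned `str` covers only the `len` initialized bytes, while the whole allocation, spare capacity included, is leaked. A sketch assuming the nightly `string_leak` feature:

```rust
#![feature(string_leak)]

let mut s = String::with_capacity(1024);
s.push_str("hi");
// Only the 2 initialized bytes are visible through the reference;
// the remaining 1022 bytes of capacity stay allocated but unreachable.
let static_ref: &'static mut str = s.leak();
assert_eq!(static_ref.len(), 2);
```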
@@ -1941,7 +1969,6 @@ impl fmt::Display for FromUtf16Error {
}
}
-#[cfg(not(bootstrap))]
#[stable(feature = "rust1", since = "1.0.0")]
impl Error for FromUtf8Error {
#[allow(deprecated)]
@@ -1950,7 +1977,6 @@ impl Error for FromUtf8Error {
}
}
-#[cfg(not(bootstrap))]
#[stable(feature = "rust1", since = "1.0.0")]
impl Error for FromUtf16Error {
#[allow(deprecated)]
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 4377edeee..81cd77074 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -3,6 +3,10 @@
//! Thread-safe reference-counting pointers.
//!
//! See the [`Arc<T>`][Arc] documentation for more details.
+//!
+//! **Note**: This module is only available on platforms that support atomic
+//! loads and stores of pointers. This may be detected at compile time using
+//! `#[cfg(target_has_atomic = "ptr")]`.
use core::any::Any;
use core::borrow;
@@ -82,6 +86,11 @@ macro_rules! acquire {
/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
/// types.
///
+/// **Note**: This type is only available on platforms that support atomic
+/// loads and stores of pointers, which includes all platforms that support
+/// the `std` crate but not all those which only support [`alloc`](crate).
+/// This may be detected at compile time using `#[cfg(target_has_atomic = "ptr")]`.
+///
/// ## Thread Safety
///
/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
@@ -1108,8 +1117,8 @@ impl<T: ?Sized> Arc<T> {
drop(Weak { ptr: self.ptr });
}
- /// Returns `true` if the two `Arc`s point to the same allocation
- /// (in a vein similar to [`ptr::eq`]).
+ /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to
+ /// [`ptr::eq`]. See [that function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
///
/// # Examples
///
@@ -1195,7 +1204,7 @@ impl<T: ?Sized> Arc<T> {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.allocate(layout),
- |mem| mem.with_metadata_of(ptr as *mut ArcInner<T>),
+ |mem| mem.with_metadata_of(ptr as *const ArcInner<T>),
)
}
}
@@ -1980,33 +1989,26 @@ impl<T: ?Sized> Weak<T> {
// We use a CAS loop to increment the strong count instead of a
// fetch_add as this function should never take the reference count
// from zero to one.
- let inner = self.inner()?;
-
- // Relaxed load because any write of 0 that we can observe
- // leaves the field in a permanently zero state (so a
- // "stale" read of 0 is fine), and any other value is
- // confirmed via the CAS below.
- let mut n = inner.strong.load(Relaxed);
-
- loop {
- if n == 0 {
- return None;
- }
-
- // See comments in `Arc::clone` for why we do this (for `mem::forget`).
- if n > MAX_REFCOUNT {
- abort();
- }
-
+ self.inner()?
+ .strong
// Relaxed is fine for the failure case because we don't have any expectations about the new state.
// Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner
// value can be initialized after `Weak` references have already been created. In that case, we
// expect to observe the fully initialized value.
- match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) {
- Ok(_) => return Some(unsafe { Arc::from_inner(self.ptr) }), // null checked above
- Err(old) => n = old,
- }
- }
+ .fetch_update(Acquire, Relaxed, |n| {
+ // Any write of 0 we can observe leaves the field in permanently zero state.
+ if n == 0 {
+ return None;
+ }
+ // See comments in `Arc::clone` for why we do this (for `mem::forget`).
+ if n > MAX_REFCOUNT {
+ abort();
+ }
+ Some(n + 1)
+ })
+ .ok()
+ // null checked above
+ .map(|_| unsafe { Arc::from_inner(self.ptr) })
}
/// Gets the number of strong (`Arc`) pointers pointing to this allocation.
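The `Weak::upgrade` rewrite above replaces a hand-rolled compare-exchange loop with `AtomicUsize::fetch_update`, which re-invokes the closure until the CAS succeeds or the closure returns `None`. A standalone sketch of the same never-revive-from-zero pattern:

```rust
use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed}};

// Mirrors Weak::upgrade: bump a refcount only if it is still nonzero,
// returning the previous value on success.
fn try_retain(count: &AtomicUsize) -> Option<usize> {
    count
        .fetch_update(Acquire, Relaxed, |n| if n == 0 { None } else { Some(n + 1) })
        .ok()
}

let live = AtomicUsize::new(2);
assert_eq!(try_retain(&live), Some(2)); // Ok carries the previous value
assert_eq!(live.load(Relaxed), 3);

let dead = AtomicUsize::new(0);
assert_eq!(try_retain(&dead), None); // a dead count is never revived
```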
@@ -2067,9 +2069,9 @@ impl<T: ?Sized> Weak<T> {
}
}
- /// Returns `true` if the two `Weak`s point to the same allocation (similar to
- /// [`ptr::eq`]), or if both don't point to any allocation
- /// (because they were created with `Weak::new()`).
+ /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
+ /// both don't point to any allocation (because they were created with `Weak::new()`). See [that
+ /// function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
///
/// # Notes
///
@@ -2764,7 +2766,6 @@ fn data_offset_align(align: usize) -> usize {
layout.size() + layout.padding_needed_for(align)
}
-#[cfg(not(bootstrap))]
#[stable(feature = "arc_error", since = "1.52.0")]
impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
#[allow(deprecated, deprecated_in_future)]
diff --git a/library/alloc/src/task.rs b/library/alloc/src/task.rs
index 528ee4ff1..9d8e309a9 100644
--- a/library/alloc/src/task.rs
+++ b/library/alloc/src/task.rs
@@ -1,5 +1,11 @@
#![stable(feature = "wake_trait", since = "1.51.0")]
+
//! Types and Traits for working with asynchronous tasks.
+//!
+//! **Note**: This module is only available on platforms that support atomic
+//! loads and stores of pointers. This may be detected at compile time using
+//! `#[cfg(target_has_atomic = "ptr")]`.
+
use core::mem::ManuallyDrop;
use core::task::{RawWaker, RawWakerVTable, Waker};
diff --git a/library/alloc/src/vec/drain.rs b/library/alloc/src/vec/drain.rs
index 5b73906a1..541f99bcf 100644
--- a/library/alloc/src/vec/drain.rs
+++ b/library/alloc/src/vec/drain.rs
@@ -1,7 +1,7 @@
use crate::alloc::{Allocator, Global};
use core::fmt;
use core::iter::{FusedIterator, TrustedLen};
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, SizedTypeProperties};
use core::ptr::{self, NonNull};
use core::slice::{self};
@@ -202,7 +202,7 @@ impl<T, A: Allocator> Drop for Drain<'_, T, A> {
let mut vec = self.vec;
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount.
// this can be achieved by manipulating the Vec length instead of moving values out from `iter`.
unsafe {
diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs
index b211421b2..87d61deb1 100644
--- a/library/alloc/src/vec/in_place_collect.rs
+++ b/library/alloc/src/vec/in_place_collect.rs
@@ -55,6 +55,9 @@
//! This is handled by the [`InPlaceDrop`] guard for sink items (`U`) and by
//! [`vec::IntoIter::forget_allocation_drop_remaining()`] for remaining source items (`T`).
//!
+//! If dropping any remaining source item (`T`) panics then [`InPlaceDstBufDrop`] will handle dropping
+//! the already collected sink items (`U`) and freeing the allocation.
+//!
//! [`vec::IntoIter::forget_allocation_drop_remaining()`]: super::IntoIter::forget_allocation_drop_remaining()
//!
//! # O(1) collect
@@ -135,10 +138,10 @@
//! vec.truncate(write_idx);
//! ```
use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, SizedTypeProperties};
use core::ptr::{self};
-use super::{InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec};
+use super::{InPlaceDrop, InPlaceDstBufDrop, SpecFromIter, SpecFromIterNested, Vec};
/// Specialization marker for collecting an iterator pipeline into a Vec while reusing the
/// source allocation, i.e. executing the pipeline in place.
@@ -154,7 +157,7 @@ where
default fn from_iter(mut iterator: I) -> Self {
// See "Layout constraints" section in the module documentation. We rely on const
// optimization here since these conditions currently cannot be expressed as trait bounds
- if mem::size_of::<T>() == 0
+ if T::IS_ZST
|| mem::size_of::<T>()
!= mem::size_of::<<<I as SourceIter>::Source as AsVecIntoIter>::Item>()
|| mem::align_of::<T>()
@@ -191,14 +194,17 @@ where
);
}
- // Drop any remaining values at the tail of the source but prevent drop of the allocation
- // itself once IntoIter goes out of scope.
- // If the drop panics then we also leak any elements collected into dst_buf.
+ // The ownership of the allocation and the new `T` values is temporarily moved into `dst_guard`.
+ // This is safe because `forget_allocation_drop_remaining` immediately forgets the allocation
+ // before any panic can occur in order to avoid any double free, and then proceeds to drop
+ // any remaining values at the tail of the source.
//
        // Note: This access to the source wouldn't be allowed by the TrustedRandomAccessNoCoerce
        // contract (used by SpecInPlaceCollect below). But see the "O(1) collect" section in the
        // module documentation for why this is ok anyway.
+ let dst_guard = InPlaceDstBufDrop { ptr: dst_buf, len, cap };
src.forget_allocation_drop_remaining();
+ mem::forget(dst_guard);
let vec = unsafe { Vec::from_raw_parts(dst_buf, len, cap) };
diff --git a/library/alloc/src/vec/in_place_drop.rs b/library/alloc/src/vec/in_place_drop.rs
index 1b1ef9130..25ca33c6a 100644
--- a/library/alloc/src/vec/in_place_drop.rs
+++ b/library/alloc/src/vec/in_place_drop.rs
@@ -22,3 +22,18 @@ impl<T> Drop for InPlaceDrop<T> {
}
}
}
+
+// A helper struct for in-place collection that drops the destination allocation and elements,
+// to avoid leaking them if some other destructor panics.
+pub(super) struct InPlaceDstBufDrop<T> {
+ pub(super) ptr: *mut T,
+ pub(super) len: usize,
+ pub(super) cap: usize,
+}
+
+impl<T> Drop for InPlaceDstBufDrop<T> {
+ #[inline]
+ fn drop(&mut self) {
+ unsafe { super::Vec::from_raw_parts(self.ptr, self.len, self.cap) };
+ }
+}
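`InPlaceDstBufDrop` is an instance of the standard drop-guard idiom: ownership is parked in a guard whose `Drop` runs cleanup if a panic unwinds, and the guard is defused with `mem::forget` on the success path (as `in_place_collect.rs` does above). A generic sketch of the idiom, with hypothetical names:

```rust
use std::mem;

struct CleanupGuard<'a> {
    log: &'a mut Vec<&'static str>,
}

impl Drop for CleanupGuard<'_> {
    fn drop(&mut self) {
        // Runs only if the guard is still alive, i.e. on the panic path.
        self.log.push("cleaned up");
    }
}

fn do_work(log: &mut Vec<&'static str>, fail: bool) {
    let guard = CleanupGuard { log };
    if fail {
        panic!("work failed"); // unwinding drops `guard`, running cleanup
    }
    // Success: defuse the guard so the cleanup never runs.
    mem::forget(guard);
}
```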
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index b4157fd58..02cc7691a 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -8,7 +8,7 @@ use core::iter::{
FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
@@ -95,13 +95,16 @@ impl<T, A: Allocator> IntoIter<T, A> {
}
/// Drops remaining elements and relinquishes the backing allocation.
+ /// This method guarantees it won't panic before relinquishing
+ /// the backing allocation.
///
/// This is roughly equivalent to the following, but more efficient
///
/// ```
/// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
+ /// let mut into_iter = std::mem::replace(&mut into_iter, Vec::new().into_iter());
/// (&mut into_iter).for_each(core::mem::drop);
- /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
+ /// std::mem::forget(into_iter);
/// ```
///
/// This method is used by in-place iteration, refer to the vec::in_place_collect
@@ -118,6 +121,8 @@ impl<T, A: Allocator> IntoIter<T, A> {
self.ptr = self.buf.as_ptr();
self.end = self.buf.as_ptr();
+ // Dropping the remaining elements can panic, so this needs to be
+ // done only after updating the other fields.
unsafe {
ptr::drop_in_place(remaining);
}
@@ -149,7 +154,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
fn next(&mut self) -> Option<T> {
if self.ptr == self.end {
None
- } else if mem::size_of::<T>() == 0 {
+ } else if T::IS_ZST {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
@@ -167,7 +172,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- let exact = if mem::size_of::<T>() == 0 {
+ let exact = if T::IS_ZST {
self.end.addr().wrapping_sub(self.ptr.addr())
} else {
unsafe { self.end.sub_ptr(self.ptr) }
@@ -179,7 +184,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
fn advance_by(&mut self, n: usize) -> Result<(), usize> {
let step_size = self.len().min(n);
let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
// effectively results in unsigned pointers representing positions 0..usize::MAX,
// which is valid for ZSTs.
@@ -209,7 +214,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
let len = self.len();
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
if len < N {
self.forget_remaining_elements();
// Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
@@ -218,7 +223,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
self.ptr = self.ptr.wrapping_byte_add(N);
// Safety: ditto
- return Ok(unsafe { MaybeUninit::array_assume_init(raw_ary) });
+ return Ok(unsafe { raw_ary.transpose().assume_init() });
}
if len < N {
@@ -236,7 +241,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
return unsafe {
ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
self.ptr = self.ptr.add(N);
- Ok(MaybeUninit::array_assume_init(raw_ary))
+ Ok(raw_ary.transpose().assume_init())
};
}
@@ -253,7 +258,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
// that `T: Copy` so reading elements from the buffer doesn't invalidate
// them for `Drop`.
unsafe {
- if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
+ if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
}
}
}
@@ -264,7 +269,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
fn next_back(&mut self) -> Option<T> {
if self.end == self.ptr {
None
- } else if mem::size_of::<T>() == 0 {
+ } else if T::IS_ZST {
// See above for why 'ptr.offset' isn't used
self.end = self.end.wrapping_byte_sub(1);
@@ -280,7 +285,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
#[inline]
fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
let step_size = self.len().min(n);
- if mem::size_of::<T>() == 0 {
+ if T::IS_ZST {
// SAFETY: same as for advance_by()
self.end = self.end.wrapping_byte_sub(step_size);
} else {
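Two hunks above swap `MaybeUninit::array_assume_init` for the newer `maybe_uninit_uninit_array_transpose` API, which turns `[MaybeUninit<T>; N]` into `MaybeUninit<[T; N]>` so the ordinary `assume_init` applies. A minimal nightly sketch:

```rust
#![feature(maybe_uninit_uninit_array, maybe_uninit_uninit_array_transpose)]
use std::mem::MaybeUninit;

let mut raw: [MaybeUninit<u8>; 4] = MaybeUninit::uninit_array();
for (i, slot) in raw.iter_mut().enumerate() {
    slot.write(i as u8);
}
// transpose: [MaybeUninit<u8>; 4] -> MaybeUninit<[u8; 4]>
// SAFETY: every element was initialized in the loop above.
let arr: [u8; 4] = unsafe { raw.transpose().assume_init() };
assert_eq!(arr, [0, 1, 2, 3]);
```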
diff --git a/library/alloc/src/vec/is_zero.rs b/library/alloc/src/vec/is_zero.rs
index 2e025c8a4..8e652d676 100644
--- a/library/alloc/src/vec/is_zero.rs
+++ b/library/alloc/src/vec/is_zero.rs
@@ -160,3 +160,25 @@ unsafe impl<T: IsZero> IsZero for Saturating<T> {
self.0.is_zero()
}
}
+
+macro_rules! impl_for_optional_bool {
+ ($($t:ty,)+) => {$(
+ unsafe impl IsZero for $t {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ // SAFETY: This is *not* a stable layout guarantee, but
+ // inside `core` we're allowed to rely on the current rustc
+ // behaviour that options of bools will be one byte with
+ // no padding, so long as they're nested less than 254 deep.
+ let raw: u8 = unsafe { core::mem::transmute(*self) };
+ raw == 0
+ }
+ }
+ )+};
+}
+impl_for_optional_bool! {
+ Option<bool>,
+ Option<Option<bool>>,
+ Option<Option<Option<bool>>>,
+ // Could go further, but not worth the metadata overhead
+}
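The transmute above leans on a rustc representation detail, not a stable guarantee: `Option<bool>` is niche-packed into one byte, and the all-zero bit pattern corresponds to `Some(false)`. That is what lets `vec![Some(false); n]` take the zeroed-allocation fast path. A sketch of the assumption:

```rust
use std::mem::size_of;

// Niche optimization: bool uses bit patterns 0 and 1, so None can live in
// an otherwise-invalid pattern and the Option stays one byte, even nested
// (a current-rustc detail, as the comment in the impl notes).
assert_eq!(size_of::<Option<bool>>(), 1);
assert_eq!(size_of::<Option<Option<bool>>>(), 1);

// Some(false) is the all-zero byte, so this Vec qualifies for the
// pre-zeroed allocation path instead of element-by-element writes.
let v: Vec<Option<bool>> = vec![Some(false); 1024];
assert!(v.iter().all(|&x| x == Some(false)));
```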
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 60b36af5e..bbbdc3aa2 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -64,7 +64,7 @@ use core::iter;
#[cfg(not(no_global_oom_handling))]
use core::iter::FromIterator;
use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice::{self, SliceIndex};
@@ -125,7 +125,7 @@ use self::set_len_on_drop::SetLenOnDrop;
mod set_len_on_drop;
#[cfg(not(no_global_oom_handling))]
-use self::in_place_drop::InPlaceDrop;
+use self::in_place_drop::{InPlaceDrop, InPlaceDstBufDrop};
#[cfg(not(no_global_oom_handling))]
mod in_place_drop;
@@ -483,15 +483,13 @@ impl<T> Vec<T> {
Self::with_capacity_in(capacity, Global)
}
- /// Creates a `Vec<T>` directly from the raw components of another vector.
+ /// Creates a `Vec<T>` directly from a pointer, a capacity, and a length.
///
/// # Safety
///
/// This is highly unsafe, due to the number of invariants that aren't
/// checked:
///
- /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
- /// (at least, it's highly likely to be incorrect if it wasn't).
/// * `T` needs to have the same alignment as what `ptr` was allocated with.
/// (`T` having a less strict alignment is not sufficient, the alignment really
/// needs to be equal to satisfy the [`dealloc`] requirement that memory must be
@@ -500,6 +498,14 @@ impl<T> Vec<T> {
/// to be the same size as the pointer was allocated with. (Because similar to
/// alignment, [`dealloc`] must be called with the same layout `size`.)
/// * `length` needs to be less than or equal to `capacity`.
+ /// * The first `length` values must be properly initialized values of type `T`.
+ /// * `capacity` needs to be the capacity that the pointer was allocated with.
+ /// * The allocated size in bytes must be no larger than `isize::MAX`.
+ /// See the safety documentation of [`pointer::offset`].
+ ///
+ /// These requirements are always upheld by any `ptr` that has been allocated
+ /// via `Vec<T>`. Other allocation sources are allowed if the invariants are
+ /// upheld.
///
/// Violating these may cause problems like corrupting the allocator's
/// internal data structures. For example it is normally **not** safe
@@ -551,6 +557,32 @@ impl<T> Vec<T> {
/// assert_eq!(rebuilt, [4, 5, 6]);
/// }
/// ```
+ ///
+ /// Using memory that was allocated elsewhere:
+ ///
+ /// ```rust
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::alloc::{AllocError, Allocator, Global, Layout};
+ ///
+ /// fn main() {
+ /// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
+ ///
+ /// let vec = unsafe {
+ /// let mem = match Global.allocate(layout) {
+ /// Ok(mem) => mem.cast::<u32>().as_ptr(),
+ /// Err(AllocError) => return,
+ /// };
+ ///
+ /// mem.write(1_000_000);
+ ///
+ /// Vec::from_raw_parts_in(mem, 1, 16, Global)
+ /// };
+ ///
+ /// assert_eq!(vec, &[1_000_000]);
+ /// assert_eq!(vec.capacity(), 16);
+ /// }
+ /// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Self {
@@ -641,21 +673,30 @@ impl<T, A: Allocator> Vec<T, A> {
Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
}
- /// Creates a `Vec<T, A>` directly from the raw components of another vector.
+ /// Creates a `Vec<T, A>` directly from a pointer, a capacity, a length,
+ /// and an allocator.
///
/// # Safety
///
/// This is highly unsafe, due to the number of invariants that aren't
/// checked:
///
- /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
- /// (at least, it's highly likely to be incorrect if it wasn't).
- /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
+ /// * `T` needs to have the same alignment as what `ptr` was allocated with.
/// (`T` having a less strict alignment is not sufficient, the alignment really
/// needs to be equal to satisfy the [`dealloc`] requirement that memory must be
/// allocated and deallocated with the same layout.)
+ /// * The size of `T` times the `capacity` (ie. the allocated size in bytes) needs
+ /// to be the same size as the pointer was allocated with. (Because similar to
+ /// alignment, [`dealloc`] must be called with the same layout `size`.)
/// * `length` needs to be less than or equal to `capacity`.
- /// * `capacity` needs to be the capacity that the pointer was allocated with.
+ /// * The first `length` values must be properly initialized values of type `T`.
+ /// * `capacity` needs to [*fit*] the layout size that the pointer was allocated with.
+ /// * The allocated size in bytes must be no larger than `isize::MAX`.
+ /// See the safety documentation of [`pointer::offset`].
+ ///
+ /// These requirements are always upheld by any `ptr` that has been allocated
+ /// via `Vec<T, A>`. Other allocation sources are allowed if the invariants are
+ /// upheld.
///
/// Violating these may cause problems like corrupting the allocator's
/// internal data structures. For example it is **not** safe
@@ -673,6 +714,7 @@ impl<T, A: Allocator> Vec<T, A> {
///
/// [`String`]: crate::string::String
/// [`dealloc`]: crate::alloc::GlobalAlloc::dealloc
+ /// [*fit*]: crate::alloc::Allocator#memory-fitting
///
/// # Examples
///
@@ -711,6 +753,29 @@ impl<T, A: Allocator> Vec<T, A> {
/// assert_eq!(rebuilt, [4, 5, 6]);
/// }
/// ```
+ ///
+ /// Using memory that was allocated elsewhere:
+ ///
+ /// ```rust
+ /// use std::alloc::{alloc, Layout};
+ ///
+ /// fn main() {
+ /// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
+ /// let vec = unsafe {
+ /// let mem = alloc(layout).cast::<u32>();
+ /// if mem.is_null() {
+ /// return;
+ /// }
+ ///
+ /// mem.write(1_000_000);
+ ///
+ /// Vec::from_raw_parts(mem, 1, 16)
+ /// };
+ ///
+ /// assert_eq!(vec, &[1_000_000]);
+ /// assert_eq!(vec.capacity(), 16);
+ /// }
+ /// ```
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self {
@@ -803,13 +868,14 @@ impl<T, A: Allocator> Vec<T, A> {
(ptr, len, capacity, alloc)
}
- /// Returns the number of elements the vector can hold without
+ /// Returns the total number of elements the vector can hold without
/// reallocating.
///
/// # Examples
///
/// ```
- /// let vec: Vec<i32> = Vec::with_capacity(10);
+ /// let mut vec: Vec<i32> = Vec::with_capacity(10);
+ /// vec.push(42);
/// assert_eq!(vec.capacity(), 10);
/// ```
#[inline]
@@ -1774,6 +1840,51 @@ impl<T, A: Allocator> Vec<T, A> {
}
}
+ /// Appends an element if there is sufficient spare capacity, otherwise an error is returned
+ /// with the element.
+ ///
+ /// Unlike [`push`] this method will not reallocate when there's insufficient capacity.
+ /// The caller should use [`reserve`] or [`try_reserve`] to ensure that there is enough capacity.
+ ///
+ /// [`push`]: Vec::push
+ /// [`reserve`]: Vec::reserve
+ /// [`try_reserve`]: Vec::try_reserve
+ ///
+ /// # Examples
+ ///
+ /// A manual, panic-free alternative to [`FromIterator`]:
+ ///
+ /// ```
+ /// #![feature(vec_push_within_capacity)]
+ ///
+ /// use std::collections::TryReserveError;
+ /// fn from_iter_fallible<T>(iter: impl Iterator<Item=T>) -> Result<Vec<T>, TryReserveError> {
+ /// let mut vec = Vec::new();
+ /// for value in iter {
+ /// if let Err(value) = vec.push_within_capacity(value) {
+ /// vec.try_reserve(1)?;
+ /// // this cannot fail: the previous line either returned early or reserved at least one free slot
+ /// let _ = vec.push_within_capacity(value);
+ /// }
+ /// }
+ /// Ok(vec)
+ /// }
+ /// assert_eq!(from_iter_fallible(0..100), Ok(Vec::from_iter(0..100)));
+ /// ```
+ #[inline]
+ #[unstable(feature = "vec_push_within_capacity", issue = "100486")]
+ pub fn push_within_capacity(&mut self, value: T) -> Result<(), T> {
+ if self.len == self.buf.capacity() {
+ return Err(value);
+ }
+ unsafe {
+ let end = self.as_mut_ptr().add(self.len);
+ ptr::write(end, value);
+ self.len += 1;
+ }
+ Ok(())
+ }
+
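As a usage note (a sketch, not part of the patch): on the unstable `vec_push_within_capacity` feature, the error case hands the rejected value back so the caller can reserve and retry:

```rust
#![feature(vec_push_within_capacity)]

fn main() {
    let mut v: Vec<u32> = Vec::with_capacity(4);
    // Fill all of the spare capacity without ever reallocating.
    while v.push_within_capacity(v.len() as u32).is_ok() {}
    assert_eq!(v.len(), v.capacity());
    // Once full, the value is handed back instead of triggering a reallocation.
    assert_eq!(v.push_within_capacity(99), Err(99));
}
```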
/// Removes the last element from a vector and returns it, or [`None`] if it
/// is empty.
///
@@ -1889,9 +2000,7 @@ impl<T, A: Allocator> Vec<T, A> {
unsafe {
// set self.vec length's to start, to be safe in case Drain is leaked
self.set_len(start);
- // Use the borrow in the IterMut to indicate borrowing behavior of the
- // whole Drain iterator (like &mut T).
- let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
+ let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);
Drain {
tail_start: end,
tail_len: len - end,
@@ -2083,7 +2192,6 @@ impl<T, A: Allocator> Vec<T, A> {
/// static_ref[0] += 1;
/// assert_eq!(static_ref, &[2, 2, 3]);
/// ```
- #[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_leak", since = "1.47.0")]
#[inline]
pub fn leak<'a>(self) -> &'a mut [T]
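Since only part of the doc example is visible in this hunk, here is a self-contained sketch of `Vec::leak` for reference (stable since 1.47.0, per the attribute above):

```rust
fn main() {
    let x = vec![1, 2, 3];
    // Leaking consumes the Vec without freeing its buffer and returns a
    // mutable slice with an unbounded (here 'static) lifetime.
    let static_ref: &'static mut [i32] = x.leak();
    static_ref[0] += 1;
    assert_eq!(static_ref, &[2, 2, 3]);
}
```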
@@ -2347,7 +2455,7 @@ impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
#[unstable(feature = "slice_flatten", issue = "95629")]
pub fn into_flattened(self) -> Vec<T, A> {
let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
- let (new_len, new_cap) = if mem::size_of::<T>() == 0 {
+ let (new_len, new_cap) = if T::IS_ZST {
(len.checked_mul(N).expect("vec len overflow"), usize::MAX)
} else {
// SAFETY:
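A short usage sketch (assuming the unstable `slice_flatten` feature noted above): `into_flattened` reuses the original allocation and only rescales the length and capacity, which is why the zero-sized case needs the special branch:

```rust
#![feature(slice_flatten)]

fn main() {
    let v = vec![[1, 2, 3], [4, 5, 6]];
    // No new allocation: len and capacity are multiplied by N (= 3 here).
    let flat: Vec<i32> = v.into_flattened();
    assert_eq!(flat, [1, 2, 3, 4, 5, 6]);
}
```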
@@ -2677,7 +2785,7 @@ impl<T, A: Allocator> IntoIterator for Vec<T, A> {
let mut me = ManuallyDrop::new(self);
let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
let begin = me.as_mut_ptr();
- let end = if mem::size_of::<T>() == 0 {
+ let end = if T::IS_ZST {
begin.wrapping_byte_add(me.len())
} else {
begin.add(me.len()) as *const T
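To see why the zero-sized branch exists (a sketch using only stable behavior): ZST elements all share one address, so the iterator cannot advance a real pointer and instead encodes the remaining length in the `end` pointer via wrapping byte arithmetic:

```rust
fn main() {
    let v = vec![(), (), ()];
    let mut it = v.into_iter();
    // `len` is derived from the distance between the begin and end pointers;
    // for a ZST that distance is tracked in bytes rather than elements.
    assert_eq!(it.len(), 3);
    it.next();
    assert_eq!(it.len(), 2);
}
```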
diff --git a/library/alloc/tests/autotraits.rs b/library/alloc/tests/autotraits.rs
new file mode 100644
index 000000000..8ff5f0abe
--- /dev/null
+++ b/library/alloc/tests/autotraits.rs
@@ -0,0 +1,293 @@
+fn require_sync<T: Sync>(_: T) {}
+fn require_send_sync<T: Send + Sync>(_: T) {}
+
+struct NotSend(*const ());
+unsafe impl Sync for NotSend {}
+
+#[test]
+fn test_btree_map() {
+ // Tests of this form are prone to https://github.com/rust-lang/rust/issues/64552.
+ //
+ // In theory the async block's future would be Send if the value we hold
+ // across the await point is Send, and Sync if the value we hold across the
+ // await point is Sync.
+ //
+ // We test autotraits in this convoluted way, instead of a straightforward
+ // `require_send_sync::<TypeIWantToTest>()`, because the interaction with
+ // generators exposes some current limitations in rustc's ability to prove a
+ // lifetime bound on the erased generator witness types. See the above link.
+ //
+ // A typical way this would surface in real code is:
+ //
+ // fn spawn<T: Future + Send>(_: T) {}
+ //
+ // async fn f() {
+ // let map = BTreeMap::<u32, Box<dyn Send + Sync>>::new();
+ // for _ in &map {
+ // async {}.await;
+ // }
+ // }
+ //
+ // fn main() {
+ // spawn(f());
+ // }
+ //
+ // where, with some unintentionally overconstrained Send impls in liballoc's
+ // internals, the future might incorrectly not be Send even though every
+ // single type involved in the program is Send and Sync.
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::Iter<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ // Testing like this would not catch all issues that the above form catches.
+ require_send_sync(None::<alloc::collections::btree_map::Iter<'_, &u32, &u32>>);
+
+ require_sync(async {
+ let _v = None::<alloc::collections::btree_map::Iter<'_, u32, NotSend>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::BTreeMap<&u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<
+ alloc::collections::btree_map::DrainFilter<
+ '_,
+ &u32,
+ &u32,
+ fn(&&u32, &mut &u32) -> bool,
+ >,
+ >;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::Entry<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::IntoIter<&u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::IntoKeys<&u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::IntoValues<&u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::Iter<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::IterMut<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::Keys<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::OccupiedEntry<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::OccupiedError<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::Range<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::RangeMut<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::VacantEntry<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::Values<'_, &u32, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_map::ValuesMut<'_, &u32, &u32>>;
+ async {}.await;
+ });
+}
+
+#[test]
+fn test_btree_set() {
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::BTreeSet<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::Difference<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::DrainFilter<'_, &u32, fn(&&u32) -> bool>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::Intersection<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::IntoIter<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::Iter<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::Range<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::SymmetricDifference<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::btree_set::Union<'_, &u32>>;
+ async {}.await;
+ });
+}
+
+#[test]
+fn test_binary_heap() {
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::BinaryHeap<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::Drain<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::DrainSorted<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::IntoIter<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::IntoIterSorted<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::Iter<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::binary_heap::PeekMut<'_, &u32>>;
+ async {}.await;
+ });
+}
+
+#[test]
+fn test_linked_list() {
+ require_send_sync(async {
+ let _v = None::<alloc::collections::linked_list::Cursor<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::linked_list::CursorMut<'_, &u32>>;
+ async {}.await;
+ });
+
+ // FIXME
+ /*
+ require_send_sync(async {
+ let _v =
+ None::<alloc::collections::linked_list::DrainFilter<'_, &u32, fn(&mut &u32) -> bool>>;
+ async {}.await;
+ });
+ */
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::linked_list::IntoIter<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::linked_list::Iter<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::linked_list::IterMut<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::linked_list::LinkedList<&u32>>;
+ async {}.await;
+ });
+}
+
+#[test]
+fn test_vec_deque() {
+ require_send_sync(async {
+ let _v = None::<alloc::collections::vec_deque::Drain<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::vec_deque::IntoIter<&u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::vec_deque::Iter<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::vec_deque::IterMut<'_, &u32>>;
+ async {}.await;
+ });
+
+ require_send_sync(async {
+ let _v = None::<alloc::collections::vec_deque::VecDeque<&u32>>;
+ async {}.await;
+ });
+}
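A note on the `NotSend` helper defined at the top of this file (a sketch outside the test harness): the raw-pointer field makes it `!Send`, while the `unsafe impl` reinstates only `Sync`, which is exactly what lets `require_sync`, but not `require_send_sync`, accept futures holding B-tree iterators over it:

```rust
struct NotSend(*const ());
unsafe impl Sync for NotSend {}

fn require_sync<T: Sync>(_: T) {}

fn main() {
    // Compiles: the unsafe impl above makes NotSend Sync.
    require_sync(NotSend(std::ptr::null()));

    // Would not compile: raw pointers are !Send and nothing adds Send back.
    // fn require_send<T: Send>(_: T) {}
    // require_send(NotSend(std::ptr::null()));
}
```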
diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs
index 490c0d8f7..ffc5ca7a5 100644
--- a/library/alloc/tests/lib.rs
+++ b/library/alloc/tests/lib.rs
@@ -2,6 +2,7 @@
#![feature(alloc_layout_extra)]
#![feature(assert_matches)]
#![feature(box_syntax)]
+#![feature(btree_drain_filter)]
#![feature(cow_is_borrowed)]
#![feature(const_box)]
#![feature(const_convert)]
@@ -14,6 +15,8 @@
#![feature(core_intrinsics)]
#![feature(drain_filter)]
#![feature(exact_size_is_empty)]
+#![feature(linked_list_cursors)]
+#![feature(map_try_insert)]
#![feature(new_uninit)]
#![feature(pattern)]
#![feature(trusted_len)]
@@ -32,7 +35,7 @@
#![feature(slice_group_by)]
#![feature(slice_partition_dedup)]
#![feature(string_remove_matches)]
-#![feature(const_btree_new)]
+#![feature(const_btree_len)]
#![feature(const_default_impls)]
#![feature(const_trait_impl)]
#![feature(const_str_from_utf8)]
@@ -41,7 +44,6 @@
#![feature(pointer_is_aligned)]
#![feature(slice_flatten)]
#![feature(thin_box)]
-#![feature(bench_black_box)]
#![feature(strict_provenance)]
#![feature(once_cell)]
#![feature(drain_keep_rest)]
@@ -50,6 +52,7 @@ use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
mod arc;
+mod autotraits;
mod borrow;
mod boxed;
mod btree_set_hash;
diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs
index f140fc414..e02711870 100644
--- a/library/alloc/tests/vec.rs
+++ b/library/alloc/tests/vec.rs
@@ -1191,48 +1191,53 @@ fn test_from_iter_specialization_panic_during_iteration_drops() {
}
#[test]
-fn test_from_iter_specialization_panic_during_drop_leaks() {
- static mut DROP_COUNTER: usize = 0;
+fn test_from_iter_specialization_panic_during_drop_doesnt_leak() {
+ static mut DROP_COUNTER_OLD: [usize; 5] = [0; 5];
+ static mut DROP_COUNTER_NEW: [usize; 2] = [0; 2];
#[derive(Debug)]
- enum Droppable {
- DroppedTwice(Box<i32>),
- PanicOnDrop,
- }
+ struct Old(usize);
- impl Drop for Droppable {
+ impl Drop for Old {
fn drop(&mut self) {
- match self {
- Droppable::DroppedTwice(_) => {
- unsafe {
- DROP_COUNTER += 1;
- }
- println!("Dropping!")
- }
- Droppable::PanicOnDrop => {
- if !std::thread::panicking() {
- panic!();
- }
- }
+ unsafe {
+ DROP_COUNTER_OLD[self.0] += 1;
+ }
+
+ if self.0 == 3 {
+ panic!();
}
+
+ println!("Dropped Old: {}", self.0);
}
}
- let mut to_free: *mut Droppable = core::ptr::null_mut();
- let mut cap = 0;
+ #[derive(Debug)]
+ struct New(usize);
+
+ impl Drop for New {
+ fn drop(&mut self) {
+ unsafe {
+ DROP_COUNTER_NEW[self.0] += 1;
+ }
+
+ println!("Dropped New: {}", self.0);
+ }
+ }
let _ = std::panic::catch_unwind(AssertUnwindSafe(|| {
- let mut v = vec![Droppable::DroppedTwice(Box::new(123)), Droppable::PanicOnDrop];
- to_free = v.as_mut_ptr();
- cap = v.capacity();
- let _ = v.into_iter().take(0).collect::<Vec<_>>();
+ let v = vec![Old(0), Old(1), Old(2), Old(3), Old(4)];
+ let _ = v.into_iter().map(|x| New(x.0)).take(2).collect::<Vec<_>>();
}));
- assert_eq!(unsafe { DROP_COUNTER }, 1);
- // clean up the leak to keep miri happy
- unsafe {
- drop(Vec::from_raw_parts(to_free, 0, cap));
- }
+ assert_eq!(unsafe { DROP_COUNTER_OLD[0] }, 1);
+ assert_eq!(unsafe { DROP_COUNTER_OLD[1] }, 1);
+ assert_eq!(unsafe { DROP_COUNTER_OLD[2] }, 1);
+ assert_eq!(unsafe { DROP_COUNTER_OLD[3] }, 1);
+ assert_eq!(unsafe { DROP_COUNTER_OLD[4] }, 1);
+
+ assert_eq!(unsafe { DROP_COUNTER_NEW[0] }, 1);
+ assert_eq!(unsafe { DROP_COUNTER_NEW[1] }, 1);
}
// regression test for issue #85322. Peekable previously implemented InPlaceIterable,