author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:35 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:35 +0000
commit    d1b2d29528b7794b41e66fc2136e395a02f8529b (patch)
tree      a4a17504b260206dec3cf55b2dca82929a348ac2 /library/alloc
parent    Releasing progress-linux version 1.72.1+dfsg1-1~progress7.99u1. (diff)
download  rustc-d1b2d29528b7794b41e66fc2136e395a02f8529b.tar.xz
          rustc-d1b2d29528b7794b41e66fc2136e395a02f8529b.zip
Merging upstream version 1.73.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'library/alloc')
-rw-r--r--  library/alloc/src/alloc.rs                         22
-rw-r--r--  library/alloc/src/boxed.rs                         48
-rw-r--r--  library/alloc/src/collections/btree/map.rs         62
-rw-r--r--  library/alloc/src/collections/btree/set.rs         13
-rw-r--r--  library/alloc/src/collections/linked_list.rs       24
-rw-r--r--  library/alloc/src/collections/vec_deque/mod.rs     46
-rw-r--r--  library/alloc/src/collections/vec_deque/tests.rs    4
-rw-r--r--  library/alloc/src/lib.rs                            5
-rw-r--r--  library/alloc/src/raw_vec.rs                       30
-rw-r--r--  library/alloc/src/rc.rs                           899
-rw-r--r--  library/alloc/src/str.rs                            8
-rw-r--r--  library/alloc/src/string.rs                        98
-rw-r--r--  library/alloc/src/sync.rs                         945
-rw-r--r--  library/alloc/src/vec/mod.rs                        4
-rw-r--r--  library/alloc/src/vec/spec_extend.rs                4
-rw-r--r--  library/alloc/tests/str.rs                         31
-rw-r--r--  library/alloc/tests/string.rs                      48
-rw-r--r--  library/alloc/tests/vec.rs                         65
18 files changed, 1793 insertions, 563 deletions
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index e24a0fe51..5205ed9fb 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -4,11 +4,7 @@
#[cfg(not(test))]
use core::intrinsics;
-#[cfg(all(bootstrap, not(test)))]
-use core::intrinsics::{min_align_of_val, size_of_val};
-#[cfg(all(bootstrap, not(test)))]
-use core::ptr::Unique;
#[cfg(not(test))]
use core::ptr::{self, NonNull};
@@ -337,23 +333,6 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
}
}
-#[cfg(all(bootstrap, not(test)))]
-#[lang = "box_free"]
-#[inline]
-// This signature has to be the same as `Box`, otherwise an ICE will happen.
-// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
-// well.
-// For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
-// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
-unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
- unsafe {
- let size = size_of_val(ptr.as_ref());
- let align = min_align_of_val(ptr.as_ref());
- let layout = Layout::from_size_align_unchecked(size, align);
- alloc.deallocate(From::from(ptr.cast()), layout)
- }
-}
-
// # Allocation error handler
#[cfg(not(no_global_oom_handling))]
@@ -413,7 +392,6 @@ pub mod __alloc_error_handler {
static __rust_alloc_error_handler_should_panic: u8;
}
- #[allow(unused_unsafe)]
if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
panic!("memory allocation of {size} bytes failed")
} else {
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 8ef2bac92..96b93830f 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -157,12 +157,12 @@ use core::hash::{Hash, Hasher};
use core::iter::FusedIterator;
use core::marker::Tuple;
use core::marker::Unsize;
-use core::mem;
+use core::mem::{self, SizedTypeProperties};
use core::ops::{
CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
};
use core::pin::Pin;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
use core::task::{Context, Poll};
#[cfg(not(no_global_oom_handling))]
@@ -479,8 +479,12 @@ impl<T, A: Allocator> Box<T, A> {
where
A: Allocator,
{
- let layout = Layout::new::<mem::MaybeUninit<T>>();
- let ptr = alloc.allocate(layout)?.cast();
+ let ptr = if T::IS_ZST {
+ NonNull::dangling()
+ } else {
+ let layout = Layout::new::<mem::MaybeUninit<T>>();
+ alloc.allocate(layout)?.cast()
+ };
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
}
@@ -549,8 +553,12 @@ impl<T, A: Allocator> Box<T, A> {
where
A: Allocator,
{
- let layout = Layout::new::<mem::MaybeUninit<T>>();
- let ptr = alloc.allocate_zeroed(layout)?.cast();
+ let ptr = if T::IS_ZST {
+ NonNull::dangling()
+ } else {
+ let layout = Layout::new::<mem::MaybeUninit<T>>();
+ alloc.allocate_zeroed(layout)?.cast()
+ };
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
}
@@ -675,14 +683,16 @@ impl<T> Box<[T]> {
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
- unsafe {
+ let ptr = if T::IS_ZST || len == 0 {
+ NonNull::dangling()
+ } else {
let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
Ok(l) => l,
Err(_) => return Err(AllocError),
};
- let ptr = Global.allocate(layout)?;
- Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
- }
+ Global.allocate(layout)?.cast()
+ };
+ unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
}
/// Constructs a new boxed slice with uninitialized contents, with the memory
@@ -707,14 +717,16 @@ impl<T> Box<[T]> {
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn try_new_zeroed_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
- unsafe {
+ let ptr = if T::IS_ZST || len == 0 {
+ NonNull::dangling()
+ } else {
let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
Ok(l) => l,
Err(_) => return Err(AllocError),
};
- let ptr = Global.allocate_zeroed(layout)?;
- Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
- }
+ Global.allocate_zeroed(layout)?.cast()
+ };
+ unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
}
}
@@ -1219,7 +1231,9 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
unsafe {
let layout = Layout::for_value_raw(ptr.as_ptr());
- self.1.deallocate(From::from(ptr.cast()), layout)
+ if layout.size() != 0 {
+ self.1.deallocate(From::from(ptr.cast()), layout);
+ }
}
}
}
@@ -2169,7 +2183,7 @@ impl dyn Error + Send {
let err: Box<dyn Error> = self;
<dyn Error>::downcast(err).map_err(|s| unsafe {
// Reapply the `Send` marker.
- mem::transmute::<Box<dyn Error>, Box<dyn Error + Send>>(s)
+ Box::from_raw(Box::into_raw(s) as *mut (dyn Error + Send))
})
}
}
@@ -2183,7 +2197,7 @@ impl dyn Error + Send + Sync {
let err: Box<dyn Error> = self;
<dyn Error>::downcast(err).map_err(|s| unsafe {
// Reapply the `Send + Sync` marker.
- mem::transmute::<Box<dyn Error>, Box<dyn Error + Send + Sync>>(s)
+ Box::from_raw(Box::into_raw(s) as *mut (dyn Error + Send + Sync))
})
}
}
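
The hunks above make the fallible `Box` constructors skip the allocator for zero-sized types and zero-length slices, and `Drop` now skips deallocation whenever the layout size is zero. A minimal nightly sketch of the resulting behaviour (assumes `#![feature(allocator_api)]`; `System` is only a stand-in allocator):

```
#![feature(allocator_api)]

use std::alloc::System;
use std::mem::MaybeUninit;

fn main() -> Result<(), std::alloc::AllocError> {
    // A zero-sized value: the constructor hands back a dangling pointer and
    // never calls `Allocator::allocate`; dropping it skips `deallocate` too.
    let unit = Box::try_new_in((), System)?;
    drop(unit);

    // A zero-length slice takes the same path under the global allocator.
    let empty: Box<[MaybeUninit<u64>]> = Box::try_new_uninit_slice(0)?;
    assert_eq!(empty.len(), 0);
    Ok(())
}
```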
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index ff908ec12..5481b327d 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -613,8 +613,6 @@ impl<K, V> BTreeMap<K, V> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -636,8 +634,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -661,8 +657,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// # #![feature(allocator_api)]
/// # #![feature(btreemap_alloc)]
@@ -688,8 +682,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -744,8 +736,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -830,8 +820,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -917,8 +905,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -943,8 +929,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -982,8 +966,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -1017,8 +999,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// #![feature(map_try_insert)]
///
@@ -1051,8 +1031,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -1078,8 +1056,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -1208,8 +1184,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
/// use std::ops::Bound::Included;
@@ -1251,8 +1225,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -1283,8 +1255,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -1336,8 +1306,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2388,8 +2356,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2420,8 +2386,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2453,8 +2417,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2474,8 +2436,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2495,8 +2455,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2521,8 +2479,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2546,8 +2502,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::collections::BTreeMap;
///
@@ -2578,8 +2532,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// #![feature(btree_cursors)]
///
@@ -2591,6 +2543,8 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// a.insert(2, "b");
/// a.insert(3, "c");
/// a.insert(4, "c");
+ /// let cursor = a.lower_bound(Bound::Included(&2));
+ /// assert_eq!(cursor.key(), Some(&2));
/// let cursor = a.lower_bound(Bound::Excluded(&2));
/// assert_eq!(cursor.key(), Some(&3));
/// ```
@@ -2619,8 +2573,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// #![feature(btree_cursors)]
///
@@ -2632,6 +2584,8 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// a.insert(2, "b");
/// a.insert(3, "c");
/// a.insert(4, "c");
+ /// let cursor = a.lower_bound_mut(Bound::Included(&2));
+ /// assert_eq!(cursor.key(), Some(&2));
/// let cursor = a.lower_bound_mut(Bound::Excluded(&2));
/// assert_eq!(cursor.key(), Some(&3));
/// ```
@@ -2673,8 +2627,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// #![feature(btree_cursors)]
///
@@ -2686,6 +2638,8 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// a.insert(2, "b");
/// a.insert(3, "c");
/// a.insert(4, "c");
+ /// let cursor = a.upper_bound(Bound::Included(&3));
+ /// assert_eq!(cursor.key(), Some(&3));
/// let cursor = a.upper_bound(Bound::Excluded(&3));
/// assert_eq!(cursor.key(), Some(&2));
/// ```
@@ -2714,8 +2668,6 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// #![feature(btree_cursors)]
///
@@ -2727,6 +2679,8 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// a.insert(2, "b");
/// a.insert(3, "c");
/// a.insert(4, "c");
+ /// let cursor = a.upper_bound_mut(Bound::Included(&3));
+ /// assert_eq!(cursor.key(), Some(&3));
/// let cursor = a.upper_bound_mut(Bound::Excluded(&3));
/// assert_eq!(cursor.key(), Some(&2));
/// ```
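
The added doc lines above contrast `Included` and `Excluded` bounds. A compact sketch of the cursor API as it stands on this nightly (assumes `#![feature(btree_cursors)]`):

```
#![feature(btree_cursors)]

use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::new();
    map.insert(1, "a");
    map.insert(3, "c");

    // An included bound lands on the key itself when it is present;
    // an excluded bound steps past it.
    assert_eq!(map.lower_bound(Bound::Included(&3)).key(), Some(&3));
    assert_eq!(map.lower_bound(Bound::Excluded(&3)).key(), None);
    assert_eq!(map.upper_bound(Bound::Included(&3)).key(), Some(&3));
    assert_eq!(map.upper_bound(Bound::Excluded(&3)).key(), Some(&1));
}
```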
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index c4461040b..9da230915 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -1121,19 +1121,6 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// ```
/// use std::collections::BTreeSet;
///
- /// let set = BTreeSet::from([1, 2, 3]);
- /// let mut set_iter = set.iter();
- /// assert_eq!(set_iter.next(), Some(&1));
- /// assert_eq!(set_iter.next(), Some(&2));
- /// assert_eq!(set_iter.next(), Some(&3));
- /// assert_eq!(set_iter.next(), None);
- /// ```
- ///
- /// Values returned by the iterator are returned in ascending order:
- ///
- /// ```
- /// use std::collections::BTreeSet;
- ///
/// let set = BTreeSet::from([3, 1, 2]);
/// let mut set_iter = set.iter();
/// assert_eq!(set_iter.next(), Some(&1));
diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs
index 052edf453..2c26f9e03 100644
--- a/library/alloc/src/collections/linked_list.rs
+++ b/library/alloc/src/collections/linked_list.rs
@@ -18,7 +18,7 @@ use core::hash::{Hash, Hasher};
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem;
-use core::ptr::{NonNull, Unique};
+use core::ptr::NonNull;
use super::SpecExtend;
use crate::alloc::{Allocator, Global};
@@ -168,15 +168,16 @@ impl<T, A: Allocator> LinkedList<T, A> {
/// Adds the given node to the front of the list.
///
/// # Safety
- /// `node` must point to a valid node that was boxed using the list's allocator.
+ /// `node` must point to a valid node that was boxed and leaked using the list's allocator.
+ /// This method takes ownership of the node, so the pointer should not be used again.
#[inline]
- unsafe fn push_front_node(&mut self, node: Unique<Node<T>>) {
+ unsafe fn push_front_node(&mut self, node: NonNull<Node<T>>) {
// This method takes care not to create mutable references to whole nodes,
// to maintain validity of aliasing pointers into `element`.
unsafe {
(*node.as_ptr()).next = self.head;
(*node.as_ptr()).prev = None;
- let node = Some(NonNull::from(node));
+ let node = Some(node);
match self.head {
None => self.tail = node,
@@ -212,15 +213,16 @@ impl<T, A: Allocator> LinkedList<T, A> {
/// Adds the given node to the back of the list.
///
/// # Safety
- /// `node` must point to a valid node that was boxed using the list's allocator.
+ /// `node` must point to a valid node that was boxed and leaked using the list's allocator.
+ /// This method takes ownership of the node, so the pointer should not be used again.
#[inline]
- unsafe fn push_back_node(&mut self, node: Unique<Node<T>>) {
+ unsafe fn push_back_node(&mut self, node: NonNull<Node<T>>) {
// This method takes care not to create mutable references to whole nodes,
// to maintain validity of aliasing pointers into `element`.
unsafe {
(*node.as_ptr()).next = None;
(*node.as_ptr()).prev = self.tail;
- let node = Some(NonNull::from(node));
+ let node = Some(node);
match self.tail {
None => self.head = node,
@@ -842,8 +844,8 @@ impl<T, A: Allocator> LinkedList<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_front(&mut self, elt: T) {
let node = Box::new_in(Node::new(elt), &self.alloc);
- let node_ptr = Unique::from(Box::leak(node));
- // SAFETY: node_ptr is a unique pointer to a node we boxed with self.alloc
+ let node_ptr = NonNull::from(Box::leak(node));
+ // SAFETY: node_ptr is a unique pointer to a node we boxed with self.alloc and leaked
unsafe {
self.push_front_node(node_ptr);
}
@@ -890,8 +892,8 @@ impl<T, A: Allocator> LinkedList<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_back(&mut self, elt: T) {
let node = Box::new_in(Node::new(elt), &self.alloc);
- let node_ptr = Unique::from(Box::leak(node));
- // SAFETY: node_ptr is a unique pointer to a node we boxed with self.alloc
+ let node_ptr = NonNull::from(Box::leak(node));
+ // SAFETY: node_ptr is a unique pointer to a node we boxed with self.alloc and leaked
unsafe {
self.push_back_node(node_ptr);
}
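
The list now hands leaked nodes around as `NonNull` rather than `Unique`, with the ownership contract spelled out in the safety comments above. A stand-alone sketch of the `Box::leak` / `NonNull::from` / `Box::from_raw` round trip this relies on (hypothetical `Node` type, global allocator only):

```
use std::ptr::NonNull;

struct Node<T> {
    value: T,
}

fn leak_node<T>(value: T) -> NonNull<Node<T>> {
    // `Box::leak` returns a mutable reference whose ownership is implicitly
    // transferred to the caller; `NonNull::from` turns it into a raw,
    // non-null pointer without claiming uniqueness in the type.
    NonNull::from(Box::leak(Box::new(Node { value })))
}

fn main() {
    let ptr = leak_node(7u32);
    // SAFETY: `ptr` came from `Box::leak` above and is reclaimed exactly once.
    let node = unsafe { Box::from_raw(ptr.as_ptr()) };
    assert_eq!(node.value, 7);
}
```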
diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index 896da37f9..5965ec2af 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -2283,21 +2283,21 @@ impl<T, A: Allocator> VecDeque<T, A> {
unsafe { slice::from_raw_parts_mut(ptr.add(self.head), self.len) }
}
- /// Rotates the double-ended queue `mid` places to the left.
+ /// Rotates the double-ended queue `n` places to the left.
///
/// Equivalently,
- /// - Rotates item `mid` into the first position.
- /// - Pops the first `mid` items and pushes them to the end.
- /// - Rotates `len() - mid` places to the right.
+ /// - Rotates item `n` into the first position.
+ /// - Pops the first `n` items and pushes them to the end.
+ /// - Rotates `len() - n` places to the right.
///
/// # Panics
///
- /// If `mid` is greater than `len()`. Note that `mid == len()`
+ /// If `n` is greater than `len()`. Note that `n == len()`
/// does _not_ panic and is a no-op rotation.
///
/// # Complexity
///
- /// Takes `*O*(min(mid, len() - mid))` time and no extra space.
+ /// Takes `*O*(min(n, len() - n))` time and no extra space.
///
/// # Examples
///
@@ -2316,31 +2316,31 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
/// ```
#[stable(feature = "vecdeque_rotate", since = "1.36.0")]
- pub fn rotate_left(&mut self, mid: usize) {
- assert!(mid <= self.len());
- let k = self.len - mid;
- if mid <= k {
- unsafe { self.rotate_left_inner(mid) }
+ pub fn rotate_left(&mut self, n: usize) {
+ assert!(n <= self.len());
+ let k = self.len - n;
+ if n <= k {
+ unsafe { self.rotate_left_inner(n) }
} else {
unsafe { self.rotate_right_inner(k) }
}
}
- /// Rotates the double-ended queue `k` places to the right.
+ /// Rotates the double-ended queue `n` places to the right.
///
/// Equivalently,
- /// - Rotates the first item into position `k`.
- /// - Pops the last `k` items and pushes them to the front.
- /// - Rotates `len() - k` places to the left.
+ /// - Rotates the first item into position `n`.
+ /// - Pops the last `n` items and pushes them to the front.
+ /// - Rotates `len() - n` places to the left.
///
/// # Panics
///
- /// If `k` is greater than `len()`. Note that `k == len()`
+ /// If `n` is greater than `len()`. Note that `n == len()`
/// does _not_ panic and is a no-op rotation.
///
/// # Complexity
///
- /// Takes `*O*(min(k, len() - k))` time and no extra space.
+ /// Takes `*O*(min(n, len() - n))` time and no extra space.
///
/// # Examples
///
@@ -2359,13 +2359,13 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
/// ```
#[stable(feature = "vecdeque_rotate", since = "1.36.0")]
- pub fn rotate_right(&mut self, k: usize) {
- assert!(k <= self.len());
- let mid = self.len - k;
- if k <= mid {
- unsafe { self.rotate_right_inner(k) }
+ pub fn rotate_right(&mut self, n: usize) {
+ assert!(n <= self.len());
+ let k = self.len - n;
+ if n <= k {
+ unsafe { self.rotate_right_inner(n) }
} else {
- unsafe { self.rotate_left_inner(mid) }
+ unsafe { self.rotate_left_inner(k) }
}
}
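
The rename of `mid` and `k` to `n` above is purely cosmetic; the rotation semantics are unchanged. For reference, a small example against the stable API:

```
use std::collections::VecDeque;

fn main() {
    let mut buf: VecDeque<_> = (0..10).collect();

    // Rotate three places to the left: element 3 moves to the front.
    buf.rotate_left(3);
    assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]);

    // Rotating the same amount to the right restores the original order.
    buf.rotate_right(3);
    assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
}
```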
diff --git a/library/alloc/src/collections/vec_deque/tests.rs b/library/alloc/src/collections/vec_deque/tests.rs
index 205a8ff3c..b7fdebfa6 100644
--- a/library/alloc/src/collections/vec_deque/tests.rs
+++ b/library/alloc/src/collections/vec_deque/tests.rs
@@ -351,14 +351,14 @@ fn test_rotate_left_right() {
}
#[test]
-#[should_panic = "assertion failed: mid <= self.len()"]
+#[should_panic = "assertion failed: n <= self.len()"]
fn test_rotate_left_panic() {
let mut tester: VecDeque<_> = (1..=10).collect();
tester.rotate_left(tester.len() + 1);
}
#[test]
-#[should_panic = "assertion failed: k <= self.len()"]
+#[should_panic = "assertion failed: n <= self.len()"]
fn test_rotate_right_panic() {
let mut tester: VecDeque<_> = (1..=10).collect();
tester.rotate_right(tester.len() + 1);
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 967ad3a0e..ffe6d6373 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -58,7 +58,7 @@
// To run alloc tests without x.py without ending up with two copies of alloc, Miri needs to be
// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
-// rustc itself never sets the feature, so this line has no affect there.
+// rustc itself never sets the feature, so this line has no effect there.
#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
//
#![allow(unused_attributes)]
@@ -88,6 +88,8 @@
#![warn(missing_docs)]
#![allow(explicit_outlives_requirements)]
#![warn(multiple_supertrait_upcastable)]
+#![cfg_attr(not(bootstrap), allow(internal_features))]
+#![cfg_attr(not(bootstrap), allow(rustdoc::redundant_explicit_links))]
//
// Library features:
// tidy-alphabetical-start
@@ -137,7 +139,6 @@
#![feature(maybe_uninit_uninit_array_transpose)]
#![feature(pattern)]
#![feature(pointer_byte_offsets)]
-#![feature(provide_any)]
#![feature(ptr_internals)]
#![feature(ptr_metadata)]
#![feature(ptr_sub_ptr)]
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
index dfd30d99c..01b03de6a 100644
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@ -432,16 +432,26 @@ impl<T, A: Allocator> RawVec<T, A> {
let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
// See current_memory() why this assert is here
let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
- let ptr = unsafe {
- // `Layout::array` cannot overflow here because it would have
- // overflowed earlier when capacity was larger.
- let new_size = mem::size_of::<T>().unchecked_mul(cap);
- let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
- self.alloc
- .shrink(ptr, layout, new_layout)
- .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
- };
- self.set_ptr_and_cap(ptr, cap);
+
+ // If shrinking to 0, deallocate the buffer. We don't reach this point
+ // for the T::IS_ZST case since current_memory() will have returned
+ // None.
+ if cap == 0 {
+ unsafe { self.alloc.deallocate(ptr, layout) };
+ self.ptr = Unique::dangling();
+ self.cap = 0;
+ } else {
+ let ptr = unsafe {
+ // `Layout::array` cannot overflow here because it would have
+ // overflowed earlier when capacity was larger.
+ let new_size = mem::size_of::<T>().unchecked_mul(cap);
+ let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+ self.alloc
+ .shrink(ptr, layout, new_layout)
+ .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
+ };
+ self.set_ptr_and_cap(ptr, cap);
+ }
Ok(())
}
}
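
With the new branch above, shrinking to capacity 0 deallocates the buffer outright instead of asking the allocator to shrink to a zero-size layout. A quick sketch of the user-visible effect through `Vec` (the capacity values are an implementation detail, shown only for illustration):

```
fn main() {
    let mut v: Vec<i32> = Vec::with_capacity(16);
    v.extend([1, 2, 3]);
    v.clear();

    // Shrinking an empty vector releases its buffer entirely.
    v.shrink_to_fit();
    assert_eq!(v.capacity(), 0);
}
```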
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index b3305b8ca..c485680f9 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -313,13 +313,17 @@ fn rcbox_layout_for_value_layout(layout: Layout) -> Layout {
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
-pub struct Rc<T: ?Sized> {
+pub struct Rc<
+ T: ?Sized,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
ptr: NonNull<RcBox<T>>,
phantom: PhantomData<RcBox<T>>,
+ alloc: A,
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> !Send for Rc<T> {}
+impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
@@ -327,20 +331,32 @@ impl<T: ?Sized> !Send for Rc<T> {}
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> !Sync for Rc<T> {}
+impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
-impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {}
+impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
-impl<T: RefUnwindSafe + ?Sized> RefUnwindSafe for Rc<T> {}
+impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
impl<T: ?Sized> Rc<T> {
+ #[inline]
+ unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+ unsafe { Self::from_inner_in(ptr, Global) }
+ }
+
+ #[inline]
+ unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
+ unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Rc<T, A> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
// This unsafety is ok because while this Rc is alive we're guaranteed
@@ -348,12 +364,14 @@ impl<T: ?Sized> Rc<T> {
unsafe { self.ptr.as_ref() }
}
- unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
- Self { ptr, phantom: PhantomData }
+ #[inline]
+ unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self {
+ Self { ptr, phantom: PhantomData, alloc }
}
- unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
- unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+ #[inline]
+ unsafe fn from_ptr_in(ptr: *mut RcBox<T>, alloc: A) -> Self {
+ unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
}
}
@@ -450,7 +468,7 @@ impl<T> Rc<T> {
let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
- let weak = Weak { ptr: init_ptr };
+ let weak = Weak { ptr: init_ptr, alloc: Global };
// It's important we don't give up ownership of the weak pointer, or
// else the memory might be freed by the time `data_fn` returns. If
@@ -504,7 +522,7 @@ impl<T> Rc<T> {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
))
}
}
@@ -537,7 +555,7 @@ impl<T> Rc<T> {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate_zeroed(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
))
}
}
@@ -594,7 +612,7 @@ impl<T> Rc<T> {
Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
)?))
}
}
@@ -627,7 +645,7 @@ impl<T> Rc<T> {
Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate_zeroed(layout),
- |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
)?))
}
}
@@ -639,6 +657,235 @@ impl<T> Rc<T> {
pub fn pin(value: T) -> Pin<Rc<T>> {
unsafe { Pin::new_unchecked(Rc::new(value)) }
}
+}
+
+impl<T, A: Allocator> Rc<T, A> {
+ /// Returns a reference to the underlying allocator.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(this: &Self) -> &A {
+ &this.alloc
+ }
+ /// Constructs a new `Rc` in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let five = Rc::new_in(5, System);
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
+ // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
+ // That would make code size bigger.
+ match Self::try_new_in(value, alloc) {
+ Ok(m) => m,
+ Err(_) => handle_alloc_error(Layout::new::<RcBox<T>>()),
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let mut five = Rc::<u32, _>::new_uninit_in(System);
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
+ unsafe {
+ Rc::from_ptr_in(
+ Rc::allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate(layout),
+ <*mut u8>::cast,
+ ),
+ alloc,
+ )
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let zero = Rc::<u32, _>::new_zeroed_in(System);
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0)
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
+ unsafe {
+ Rc::from_ptr_in(
+ Rc::allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate_zeroed(layout),
+ <*mut u8>::cast,
+ ),
+ alloc,
+ )
+ }
+ }
+
+ /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
+ /// fails
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let five = Rc::try_new_in(5, System);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
+ // There is an implicit weak pointer owned by all the strong
+ // pointers, which ensures that the weak destructor never frees
+ // the allocation while the strong destructor is running, even
+ // if the weak pointer is stored inside the strong one.
+ let (ptr, alloc) = Box::into_unique(Box::try_new_in(
+ RcBox { strong: Cell::new(1), weak: Cell::new(1), value },
+ alloc,
+ )?);
+ Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
+ /// error if the allocation fails
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api, new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
+ unsafe {
+ Ok(Rc::from_ptr_in(
+ Rc::try_allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate(layout),
+ <*mut u8>::cast,
+ )?,
+ alloc,
+ ))
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
+ /// fails
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api, new_uninit)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ //#[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
+ unsafe {
+ Ok(Rc::from_ptr_in(
+ Rc::try_allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate_zeroed(layout),
+ <*mut u8>::cast,
+ )?,
+ alloc,
+ ))
+ }
+ }
+
+ /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
+ /// `value` will be pinned in memory and unable to be moved.
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn pin_in(value: T, alloc: A) -> Pin<Self> {
+ unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
+ }
/// Returns the inner value, if the `Rc` has exactly one strong reference.
///
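
The hunk above adds the allocator-aware constructors (`new_in`, `try_new_in`, `new_uninit_in`, `pin_in` and friends) to `Rc<T, A>`. A minimal nightly sketch, assuming `#![feature(allocator_api)]` and using `System` as a stand-in allocator:

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let five = Rc::new_in(5, System);
    // Cloning requires `A: Clone`, since the allocator travels with every
    // strong and weak pointer to the allocation.
    let also_five = Rc::clone(&five);

    assert_eq!(*also_five, 5);
    assert_eq!(Rc::strong_count(&five), 2);
}
```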
@@ -665,13 +912,14 @@ impl<T> Rc<T> {
if Rc::strong_count(&this) == 1 {
unsafe {
let val = ptr::read(&*this); // copy the contained object
+ let alloc = ptr::read(&this.alloc); // copy the allocator
// Indicate to Weaks that they can't be promoted by decrementing
// the strong count, and then remove the implicit "strong weak"
// pointer while also handling drop logic by just crafting a
// fake Weak.
this.inner().dec_strong();
- let _weak = Weak { ptr: this.ptr };
+ let _weak = Weak { ptr: this.ptr, alloc };
forget(this);
Ok(val)
}
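
`try_unwrap` now also moves the allocator out of the `Rc` before building the throw-away `Weak`; its observable behaviour is unchanged. For reference, against the stable global-allocator API:

```
use std::rc::Rc;

fn main() {
    let x = Rc::new(3);
    assert_eq!(Rc::try_unwrap(x), Ok(3));

    let y = Rc::new(4);
    let _keep = Rc::clone(&y);
    // A second strong reference exists, so the value comes back in `Err`.
    assert_eq!(*Rc::try_unwrap(y).unwrap_err(), 4);
}
```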
@@ -758,7 +1006,7 @@ impl<T> Rc<[T]> {
Layout::array::<T>(len).unwrap(),
|layout| Global.allocate_zeroed(layout),
|mem| {
- ptr::slice_from_raw_parts_mut(mem as *mut T, len)
+ ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
as *mut RcBox<[mem::MaybeUninit<T>]>
},
))
@@ -766,7 +1014,84 @@ impl<T> Rc<[T]> {
}
}
-impl<T> Rc<mem::MaybeUninit<T>> {
+impl<T, A: Allocator> Rc<[T], A> {
+ /// Constructs a new reference-counted slice with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
+ unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
+ }
+
+ /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
+ /// filled with `0` bytes.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::rc::Rc;
+ /// use std::alloc::System;
+ ///
+ /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
+ /// let values = unsafe { values.assume_init() };
+ ///
+ /// assert_eq!(*values, [0, 0, 0])
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
+ unsafe {
+ Rc::from_ptr_in(
+ Rc::allocate_for_layout(
+ Layout::array::<T>(len).unwrap(),
+ |layout| alloc.allocate_zeroed(layout),
+ |mem| {
+ ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
+ as *mut RcBox<[mem::MaybeUninit<T>]>
+ },
+ ),
+ alloc,
+ )
+ }
+ }
+}
+
+impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
/// Converts to `Rc<T>`.
///
/// # Safety
@@ -798,12 +1123,16 @@ impl<T> Rc<mem::MaybeUninit<T>> {
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
- pub unsafe fn assume_init(self) -> Rc<T> {
- unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
+ pub unsafe fn assume_init(self) -> Rc<T, A>
+ where
+ A: Clone,
+ {
+ let md_self = mem::ManuallyDrop::new(self);
+ unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
}
}
-impl<T> Rc<[mem::MaybeUninit<T>]> {
+impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
/// Converts to `Rc<[T]>`.
///
/// # Safety
@@ -838,12 +1167,128 @@ impl<T> Rc<[mem::MaybeUninit<T>]> {
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
- pub unsafe fn assume_init(self) -> Rc<[T]> {
- unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+ pub unsafe fn assume_init(self) -> Rc<[T], A>
+ where
+ A: Clone,
+ {
+ let md_self = mem::ManuallyDrop::new(self);
+ unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
}
}
impl<T: ?Sized> Rc<T> {
+ /// Constructs an `Rc<T>` from a raw pointer.
+ ///
+ /// The raw pointer must have been previously returned by a call to
+ /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+ /// and alignment as `T`. This is trivially true if `U` is `T`.
+ /// Note that if `U` is not `T` but has the same size and alignment, this is
+ /// basically like transmuting references of different types. See
+ /// [`mem::transmute`][transmute] for more information on what
+ /// restrictions apply in this case.
+ ///
+ /// The raw pointer must point to a block of memory allocated by the global allocator
+ ///
+ /// The user of `from_raw` has to make sure a specific value of `T` is only
+ /// dropped once.
+ ///
+ /// This function is unsafe because improper use may lead to memory unsafety,
+ /// even if the returned `Rc<T>` is never accessed.
+ ///
+ /// [into_raw]: Rc::into_raw
+ /// [transmute]: core::mem::transmute
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new("hello".to_owned());
+ /// let x_ptr = Rc::into_raw(x);
+ ///
+ /// unsafe {
+ /// // Convert back to an `Rc` to prevent leak.
+ /// let x = Rc::from_raw(x_ptr);
+ /// assert_eq!(&*x, "hello");
+ ///
+ /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
+ /// }
+ ///
+ /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe { Self::from_raw_in(ptr, Global) }
+ }
+
+ /// Increments the strong reference count on the `Rc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Rc::into_raw`, the
+ /// associated `Rc` instance must be valid (i.e. the strong count must be at
+ /// least 1) for the duration of this method, and `ptr` must point to a block of memory
+ /// allocated by the global allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Rc::into_raw(five);
+ /// Rc::increment_strong_count(ptr);
+ ///
+ /// let five = Rc::from_raw(ptr);
+ /// assert_eq!(2, Rc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
+ pub unsafe fn increment_strong_count(ptr: *const T) {
+ unsafe { Self::increment_strong_count_in(ptr, Global) }
+ }
+
+ /// Decrements the strong reference count on the `Rc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Rc::into_raw`, the
+ /// associated `Rc` instance must be valid (i.e. the strong count must be at
+ /// least 1) when invoking this method, and `ptr` must point to a block of memory
+ /// allocated by the global allocator. This method can be used to release the final `Rc` and
+ /// backing storage, but **should not** be called after the final `Rc` has been released.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Rc::into_raw(five);
+ /// Rc::increment_strong_count(ptr);
+ ///
+ /// let five = Rc::from_raw(ptr);
+ /// assert_eq!(2, Rc::strong_count(&five));
+ /// Rc::decrement_strong_count(ptr);
+ /// assert_eq!(1, Rc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
+ pub unsafe fn decrement_strong_count(ptr: *const T) {
+ unsafe { Self::decrement_strong_count_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// Consumes the `Rc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Rc` using
@@ -891,16 +1336,18 @@ impl<T: ?Sized> Rc<T> {
unsafe { ptr::addr_of_mut!((*ptr).value) }
}
- /// Constructs an `Rc<T>` from a raw pointer.
+ /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
///
/// The raw pointer must have been previously returned by a call to
- /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+ /// [`Rc<U, A>::into_raw`][into_raw] where `U` must have the same size
/// and alignment as `T`. This is trivially true if `U` is `T`.
/// Note that if `U` is not `T` but has the same size and alignment, this is
/// basically like transmuting references of different types. See
/// [`mem::transmute`] for more information on what
/// restrictions apply in this case.
///
+ /// The raw pointer must point to a block of memory allocated by `alloc`
+ ///
/// The user of `from_raw` has to make sure a specific value of `T` is only
/// dropped once.
///
@@ -912,14 +1359,17 @@ impl<T: ?Sized> Rc<T> {
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::rc::Rc;
+ /// use std::alloc::System;
///
- /// let x = Rc::new("hello".to_owned());
+ /// let x = Rc::new_in("hello".to_owned(), System);
/// let x_ptr = Rc::into_raw(x);
///
/// unsafe {
/// // Convert back to an `Rc` to prevent leak.
- /// let x = Rc::from_raw(x_ptr);
+ /// let x = Rc::from_raw_in(x_ptr, System);
/// assert_eq!(&*x, "hello");
///
/// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
@@ -927,14 +1377,14 @@ impl<T: ?Sized> Rc<T> {
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
- #[stable(feature = "rc_raw", since = "1.17.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
let offset = unsafe { data_offset(ptr) };
// Reverse the offset to find the original RcBox.
let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcBox<T> };
- unsafe { Self::from_ptr(rc_ptr) }
+ unsafe { Self::from_ptr_in(rc_ptr, alloc) }
}
/// Creates a new [`Weak`] pointer to this allocation.
@@ -951,11 +1401,14 @@ impl<T: ?Sized> Rc<T> {
#[must_use = "this returns a new `Weak` pointer, \
without modifying the original `Rc`"]
#[stable(feature = "rc_weak", since = "1.4.0")]
- pub fn downgrade(this: &Self) -> Weak<T> {
+ pub fn downgrade(this: &Self) -> Weak<T, A>
+ where
+ A: Clone,
+ {
this.inner().inc_weak();
// Make sure we do not create a dangling Weak
debug_assert!(!is_dangling(this.ptr.as_ptr()));
- Weak { ptr: this.ptr }
+ Weak { ptr: this.ptr, alloc: this.alloc.clone() }
}
/// Gets the number of [`Weak`] pointers to this allocation.
@@ -999,30 +1452,37 @@ impl<T: ?Sized> Rc<T> {
///
/// # Safety
///
- /// The pointer must have been obtained through `Rc::into_raw`, and the
+ /// The pointer must have been obtained through `Rc::into_raw`, the
/// associated `Rc` instance must be valid (i.e. the strong count must be at
- /// least 1) for the duration of this method.
+ /// least 1) for the duration of this method, and `ptr` must point to a block of memory
+ /// allocated by `alloc`
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::rc::Rc;
+ /// use std::alloc::System;
///
- /// let five = Rc::new(5);
+ /// let five = Rc::new_in(5, System);
///
/// unsafe {
/// let ptr = Rc::into_raw(five);
- /// Rc::increment_strong_count(ptr);
+ /// Rc::increment_strong_count_in(ptr, System);
///
- /// let five = Rc::from_raw(ptr);
+ /// let five = Rc::from_raw_in(ptr, System);
/// assert_eq!(2, Rc::strong_count(&five));
/// }
/// ```
#[inline]
- #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
- pub unsafe fn increment_strong_count(ptr: *const T) {
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
+ where
+ A: Clone,
+ {
// Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
- let rc = unsafe { mem::ManuallyDrop::new(Rc::<T>::from_raw(ptr)) };
+ let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
// Now increase refcount, but don't drop new refcount either
let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
}
@@ -1032,33 +1492,36 @@ impl<T: ?Sized> Rc<T> {
///
/// # Safety
///
- /// The pointer must have been obtained through `Rc::into_raw`, and the
+ /// The pointer must have been obtained through `Rc::into_raw`, the
/// associated `Rc` instance must be valid (i.e. the strong count must be at
- /// least 1) when invoking this method. This method can be used to release
- /// the final `Rc` and backing storage, but **should not** be called after
- /// the final `Rc` has been released.
+ /// least 1) when invoking this method, and `ptr` must point to a block of memory
+ /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage,
+ /// but **should not** be called after the final `Rc` has been released.
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::rc::Rc;
+ /// use std::alloc::System;
///
- /// let five = Rc::new(5);
+ /// let five = Rc::new_in(5, System);
///
/// unsafe {
/// let ptr = Rc::into_raw(five);
- /// Rc::increment_strong_count(ptr);
+ /// Rc::increment_strong_count_in(ptr, System);
///
- /// let five = Rc::from_raw(ptr);
+ /// let five = Rc::from_raw_in(ptr, System);
/// assert_eq!(2, Rc::strong_count(&five));
- /// Rc::decrement_strong_count(ptr);
+ /// Rc::decrement_strong_count_in(ptr, System);
/// assert_eq!(1, Rc::strong_count(&five));
/// }
/// ```
#[inline]
- #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
- pub unsafe fn decrement_strong_count(ptr: *const T) {
- unsafe { drop(Rc::from_raw(ptr)) };
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
+ unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
}
/// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
@@ -1188,7 +1651,7 @@ impl<T: ?Sized> Rc<T> {
}
}
-impl<T: Clone> Rc<T> {
+impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
/// Makes a mutable reference into the given `Rc`.
///
/// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1246,7 +1709,7 @@ impl<T: Clone> Rc<T> {
if Rc::strong_count(this) != 1 {
// Gotta clone the data, there are other Rcs.
// Pre-allocate memory to allow writing the cloned value directly.
- let mut rc = Self::new_uninit();
+ let mut rc = Self::new_uninit_in(this.alloc.clone());
unsafe {
let data = Rc::get_mut_unchecked(&mut rc);
(**this).write_clone_into_raw(data.as_mut_ptr());
@@ -1254,7 +1717,7 @@ impl<T: Clone> Rc<T> {
}
} else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks
- let mut rc = Self::new_uninit();
+ let mut rc = Self::new_uninit_in(this.alloc.clone());
unsafe {
let data = Rc::get_mut_unchecked(&mut rc);
data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
@@ -1310,7 +1773,7 @@ impl<T: Clone> Rc<T> {
}
}
-impl Rc<dyn Any> {
+impl<A: Allocator + Clone> Rc<dyn Any, A> {
/// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
///
/// # Examples
@@ -1331,12 +1794,13 @@ impl Rc<dyn Any> {
/// ```
#[inline]
#[stable(feature = "rc_downcast", since = "1.29.0")]
- pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
+ pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
if (*self).is::<T>() {
unsafe {
let ptr = self.ptr.cast::<RcBox<T>>();
+ let alloc = self.alloc.clone();
forget(self);
- Ok(Rc::from_inner(ptr))
+ Ok(Rc::from_inner_in(ptr, alloc))
}
} else {
Err(self)
@@ -1371,11 +1835,12 @@ impl Rc<dyn Any> {
/// [`downcast`]: Self::downcast
#[inline]
#[unstable(feature = "downcast_unchecked", issue = "90850")]
- pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T> {
+ pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
unsafe {
let ptr = self.ptr.cast::<RcBox<T>>();
+ let alloc = self.alloc.clone();
mem::forget(self);
- Rc::from_inner(ptr)
+ Rc::from_inner_in(ptr, alloc)
}
}
}
@@ -1427,25 +1892,27 @@ impl<T: ?Sized> Rc<T> {
Ok(inner)
}
+}
+impl<T: ?Sized, A: Allocator> Rc<T, A> {
/// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
#[cfg(not(no_global_oom_handling))]
- unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
+ unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcBox<T> {
// Allocate for the `RcBox<T>` using the given value.
unsafe {
- Self::allocate_for_layout(
+ Rc::<T>::allocate_for_layout(
Layout::for_value(&*ptr),
- |layout| Global.allocate(layout),
+ |layout| alloc.allocate(layout),
|mem| mem.with_metadata_of(ptr as *const RcBox<T>),
)
}
}
#[cfg(not(no_global_oom_handling))]
- fn from_box(src: Box<T>) -> Rc<T> {
+ fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
unsafe {
let value_size = size_of_val(&*src);
- let ptr = Self::allocate_for_ptr(&*src);
+ let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
// Copy value as bytes
ptr::copy_nonoverlapping(
@@ -1455,10 +1922,11 @@ impl<T: ?Sized> Rc<T> {
);
// Free the allocation without dropping its contents
- let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>);
+ let (bptr, alloc) = Box::into_raw_with_allocator(src);
+ let src = Box::from_raw(bptr as *mut mem::ManuallyDrop<T>);
drop(src);
- Self::from_ptr(ptr)
+ Self::from_ptr_in(ptr, alloc)
}
}
}
@@ -1471,7 +1939,7 @@ impl<T> Rc<[T]> {
Self::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.allocate(layout),
- |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
+ |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
)
}
}
@@ -1538,6 +2006,21 @@ impl<T> Rc<[T]> {
}
}
+impl<T, A: Allocator> Rc<[T], A> {
+ /// Allocates an `RcBox<[T]>` with the given length.
+ #[inline]
+ #[cfg(not(no_global_oom_handling))]
+ unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcBox<[T]> {
+ unsafe {
+ Rc::<[T]>::allocate_for_layout(
+ Layout::array::<T>(len).unwrap(),
+ |layout| alloc.allocate(layout),
+ |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>,
+ )
+ }
+ }
+}
+
/// Specialization trait used for `From<&[T]>`.
trait RcFromSlice<T> {
fn from_slice(slice: &[T]) -> Self;
@@ -1560,7 +2043,7 @@ impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Deref for Rc<T> {
+impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
type Target = T;
#[inline(always)]
@@ -1573,7 +2056,7 @@ impl<T: ?Sized> Deref for Rc<T> {
impl<T: ?Sized> Receiver for Rc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
/// Drops the `Rc`.
///
/// This will decrement the strong reference count. If the strong reference
@@ -1611,7 +2094,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.inner().dec_weak();
if self.inner().weak() == 0 {
- Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+ self.alloc.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
@@ -1619,7 +2102,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Clone for Rc<T> {
+impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
/// Makes a clone of the `Rc` pointer.
///
/// This creates another pointer to the same allocation, increasing the
@@ -1635,10 +2118,10 @@ impl<T: ?Sized> Clone for Rc<T> {
/// let _ = Rc::clone(&five);
/// ```
#[inline]
- fn clone(&self) -> Rc<T> {
+ fn clone(&self) -> Self {
unsafe {
self.inner().inc_strong();
- Self::from_inner(self.ptr)
+ Self::from_inner_in(self.ptr, self.alloc.clone())
}
}
}
@@ -1663,20 +2146,20 @@ impl<T: Default> Default for Rc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-trait RcEqIdent<T: ?Sized + PartialEq> {
- fn eq(&self, other: &Rc<T>) -> bool;
- fn ne(&self, other: &Rc<T>) -> bool;
+trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
+ fn eq(&self, other: &Rc<T, A>) -> bool;
+ fn ne(&self, other: &Rc<T, A>) -> bool;
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
+impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
#[inline]
- default fn eq(&self, other: &Rc<T>) -> bool {
+ default fn eq(&self, other: &Rc<T, A>) -> bool {
**self == **other
}
#[inline]
- default fn ne(&self, other: &Rc<T>) -> bool {
+ default fn ne(&self, other: &Rc<T, A>) -> bool {
**self != **other
}
}
@@ -1695,20 +2178,20 @@ impl<T: Eq> MarkerEq for T {}
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
+impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
#[inline]
- fn eq(&self, other: &Rc<T>) -> bool {
+ fn eq(&self, other: &Rc<T, A>) -> bool {
Rc::ptr_eq(self, other) || **self == **other
}
#[inline]
- fn ne(&self, other: &Rc<T>) -> bool {
+ fn ne(&self, other: &Rc<T, A>) -> bool {
!Rc::ptr_eq(self, other) && **self != **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
+impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
/// Equality for two `Rc`s.
///
/// Two `Rc`s are equal if their inner values are equal, even if they are
@@ -1728,7 +2211,7 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// assert!(five == Rc::new(5));
/// ```
#[inline]
- fn eq(&self, other: &Rc<T>) -> bool {
+ fn eq(&self, other: &Rc<T, A>) -> bool {
RcEqIdent::eq(self, other)
}
@@ -1750,16 +2233,16 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// assert!(five != Rc::new(6));
/// ```
#[inline]
- fn ne(&self, other: &Rc<T>) -> bool {
+ fn ne(&self, other: &Rc<T, A>) -> bool {
RcEqIdent::ne(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> Eq for Rc<T> {}
+impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
+impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
/// Partial comparison for two `Rc`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
@@ -1775,7 +2258,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
/// ```
#[inline(always)]
- fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
+ fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
@@ -1793,7 +2276,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five < Rc::new(6));
/// ```
#[inline(always)]
- fn lt(&self, other: &Rc<T>) -> bool {
+ fn lt(&self, other: &Rc<T, A>) -> bool {
**self < **other
}
@@ -1811,7 +2294,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five <= Rc::new(5));
/// ```
#[inline(always)]
- fn le(&self, other: &Rc<T>) -> bool {
+ fn le(&self, other: &Rc<T, A>) -> bool {
**self <= **other
}
@@ -1829,7 +2312,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five > Rc::new(4));
/// ```
#[inline(always)]
- fn gt(&self, other: &Rc<T>) -> bool {
+ fn gt(&self, other: &Rc<T, A>) -> bool {
**self > **other
}
@@ -1847,13 +2330,13 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// assert!(five >= Rc::new(5));
/// ```
#[inline(always)]
- fn ge(&self, other: &Rc<T>) -> bool {
+ fn ge(&self, other: &Rc<T, A>) -> bool {
**self >= **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Ord> Ord for Rc<T> {
+impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
/// Comparison for two `Rc`s.
///
/// The two are compared by calling `cmp()` on their inner values.
@@ -1869,34 +2352,34 @@ impl<T: ?Sized + Ord> Ord for Rc<T> {
/// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
/// ```
#[inline]
- fn cmp(&self, other: &Rc<T>) -> Ordering {
+ fn cmp(&self, other: &Rc<T, A>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Hash> Hash for Rc<T> {
+impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
+impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
+impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> fmt::Pointer for Rc<T> {
+impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&(&**self as *const T), f)
}
@@ -1982,7 +2465,7 @@ impl From<String> for Rc<str> {
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T: ?Sized> From<Box<T>> for Rc<T> {
+impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
/// Move a boxed object to a new, reference counted, allocation.
///
/// # Example
@@ -1994,31 +2477,37 @@ impl<T: ?Sized> From<Box<T>> for Rc<T> {
/// assert_eq!(1, *shared);
/// ```
#[inline]
- fn from(v: Box<T>) -> Rc<T> {
- Rc::from_box(v)
+ fn from(v: Box<T, A>) -> Rc<T, A> {
+ Rc::from_box_in(v)
}
}
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T> From<Vec<T>> for Rc<[T]> {
+impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
/// Allocate a reference-counted slice and move `v`'s items into it.
///
/// # Example
///
/// ```
/// # use std::rc::Rc;
- /// let original: Box<Vec<i32>> = Box::new(vec![1, 2, 3]);
- /// let shared: Rc<Vec<i32>> = Rc::from(original);
- /// assert_eq!(vec![1, 2, 3], *shared);
+ /// let unique: Vec<i32> = vec![1, 2, 3];
+ /// let shared: Rc<[i32]> = Rc::from(unique);
+ /// assert_eq!(&[1, 2, 3], &shared[..]);
/// ```
#[inline]
- fn from(mut v: Vec<T>) -> Rc<[T]> {
+ fn from(v: Vec<T, A>) -> Rc<[T], A> {
unsafe {
- let rc = Rc::copy_from_slice(&v);
- // Allow the Vec to free its memory, but not destroy its contents
- v.set_len(0);
- rc
+ let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
+
+ let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
+ ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).value as *mut [T] as *mut T, len);
+
+ // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
+ // without dropping its contents or the allocator
+ let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
+
+ Self::from_ptr_in(rc_ptr, alloc)
}
}
}
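The rewritten conversion moves the `Vec`'s elements into the new reference-counted allocation and then frees the original buffer without dropping its contents, so nothing is cloned or dropped twice. A short usage sketch of the stable `Global` case:

```
use std::rc::Rc;

fn main() {
    let v = vec![String::from("a"), String::from("b")];
    // The strings are moved, not cloned; only the Vec's buffer is freed.
    let shared: Rc<[String]> = Rc::from(v);
    assert_eq!(shared.len(), 2);
    assert_eq!(shared[0], "a");
}
```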
@@ -2189,7 +2678,10 @@ impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
-pub struct Weak<T: ?Sized> {
+pub struct Weak<
+ T: ?Sized,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
// This is a `NonNull` to allow optimizing the size of this type in enums,
// but it is not necessarily a valid pointer.
// `Weak::new` sets this to `usize::MAX` so that it doesn’t need
@@ -2197,15 +2689,16 @@ pub struct Weak<T: ?Sized> {
// will ever have because RcBox has alignment at least 2.
// This is only possible when `T: Sized`; unsized `T` never dangle.
ptr: NonNull<RcBox<T>>,
+ alloc: A,
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> !Send for Weak<T> {}
+impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> !Sync for Weak<T> {}
+impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
@@ -2224,16 +2717,45 @@ impl<T> Weak<T> {
/// let empty: Weak<i64> = Weak::new();
/// assert!(empty.upgrade().is_none());
/// ```
+ #[inline]
#[stable(feature = "downgraded_weak", since = "1.10.0")]
- #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")]
+ #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
#[must_use]
pub const fn new() -> Weak<T> {
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) } }
+ Weak {
+ ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
+ alloc: Global,
+ }
+ }
+}
+
+impl<T, A: Allocator> Weak<T, A> {
+ /// Constructs a new `Weak<T>` in the provided allocator, without allocating any
+ /// memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: Weak::upgrade
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Weak;
+ ///
+ /// let empty: Weak<i64> = Weak::new();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn new_in(alloc: A) -> Weak<T, A> {
+ Weak {
+ ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
+ alloc,
+ }
}
}
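The doc example above still exercises `Weak::new()`; a sketch that goes through `Weak::new_in` itself (nightly, `allocator_api` assumed) mirrors the `sync` counterpart further down:

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Weak;

fn main() {
    let empty: Weak<i64, System> = Weak::new_in(System);
    assert!(empty.upgrade().is_none());
}
```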
pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
- (ptr as *mut ()).addr() == usize::MAX
+ (ptr.cast::<()>()).addr() == usize::MAX
}
/// Helper type to allow accessing the reference counts without
@@ -2244,6 +2766,56 @@ struct WeakInner<'a> {
}
impl<T: ?Sized> Weak<T> {
+ /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+ ///
+ /// This can be used to safely get a strong reference (by calling [`upgrade`]
+ /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+ ///
+ /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
+ /// as these don't own anything; the method still works on them).
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have originated from the [`into_raw`] and must still own its potential
+ /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
+ ///
+ /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
+ /// takes ownership of one weak reference currently represented as a raw pointer (the weak
+ /// count is not modified by this operation) and therefore it must be paired with a previous
+ /// call to [`into_raw`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ ///
+ /// let raw_1 = Rc::downgrade(&strong).into_raw();
+ /// let raw_2 = Rc::downgrade(&strong).into_raw();
+ ///
+ /// assert_eq!(2, Rc::weak_count(&strong));
+ ///
+ /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+ /// assert_eq!(1, Rc::weak_count(&strong));
+ ///
+ /// drop(strong);
+ ///
+ /// // Decrement the last weak count.
+ /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+ /// ```
+ ///
+ /// [`into_raw`]: Weak::into_raw
+ /// [`upgrade`]: Weak::upgrade
+ /// [`new`]: Weak::new
+ #[inline]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe { Self::from_raw_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Weak<T, A> {
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
///
/// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -2321,6 +2893,45 @@ impl<T: ?Sized> Weak<T> {
result
}
+ /// Consumes the `Weak<T>` and turns it into a raw pointer, also returning a clone of the allocator.
+ ///
+ /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+ /// one weak reference (the weak count is not modified by this operation). It can be turned
+ /// back into the `Weak<T>` with [`from_raw`].
+ ///
+ /// The same restrictions of accessing the target of the pointer as with
+ /// [`as_ptr`] apply.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ /// let weak = Rc::downgrade(&strong);
+ /// let raw = weak.into_raw();
+ ///
+ /// assert_eq!(1, Rc::weak_count(&strong));
+ /// assert_eq!("hello", unsafe { &*raw });
+ ///
+ /// drop(unsafe { Weak::from_raw(raw) });
+ /// assert_eq!(0, Rc::weak_count(&strong));
+ /// ```
+ ///
+ /// [`from_raw`]: Weak::from_raw
+ /// [`as_ptr`]: Weak::as_ptr
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn into_raw_and_alloc(self) -> (*const T, A)
+ where
+ A: Clone,
+ {
+ let result = self.as_ptr();
+ let alloc = self.alloc.clone();
+ mem::forget(self);
+ (result, alloc)
+ }
+
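`into_raw_and_alloc` is the allocator-aware sibling of `into_raw`: it returns the raw pointer together with a clone of the allocator, and the pair can be reassembled with `from_raw_in`. A hedged round-trip sketch (nightly, `allocator_api` assumed):

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::{Rc, Weak};

fn main() {
    let strong = Rc::new_in(String::from("hello"), System);
    let weak = Rc::downgrade(&strong);

    // Split the weak reference into its raw parts; the weak count is untouched.
    let (raw, alloc) = Weak::into_raw_and_alloc(weak);
    assert_eq!(1, Rc::weak_count(&strong));

    // Reassemble it; again the weak count does not change.
    let weak = unsafe { Weak::from_raw_in(raw, alloc) };
    assert!(weak.upgrade().is_some());
    assert_eq!(1, Rc::weak_count(&strong));
}
```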
/// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
///
/// This can be used to safely get a strong reference (by calling [`upgrade`]
@@ -2332,7 +2943,7 @@ impl<T: ?Sized> Weak<T> {
/// # Safety
///
/// The pointer must have originated from the [`into_raw`] and must still own its potential
- /// weak reference.
+ /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
///
/// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
/// takes ownership of one weak reference currently represented as a raw pointer (the weak
@@ -2363,8 +2974,9 @@ impl<T: ?Sized> Weak<T> {
/// [`into_raw`]: Weak::into_raw
/// [`upgrade`]: Weak::upgrade
/// [`new`]: Weak::new
- #[stable(feature = "weak_into_raw", since = "1.45.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
// See Weak::as_ptr for context on how the input pointer is derived.
let ptr = if is_dangling(ptr as *mut T) {
@@ -2380,7 +2992,7 @@ impl<T: ?Sized> Weak<T> {
};
// SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+ Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
}
/// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
@@ -2409,7 +3021,10 @@ impl<T: ?Sized> Weak<T> {
#[must_use = "this returns a new `Rc`, \
without modifying the original weak pointer"]
#[stable(feature = "rc_weak", since = "1.4.0")]
- pub fn upgrade(&self) -> Option<Rc<T>> {
+ pub fn upgrade(&self) -> Option<Rc<T, A>>
+ where
+ A: Clone,
+ {
let inner = self.inner()?;
if inner.strong() == 0 {
@@ -2417,7 +3032,7 @@ impl<T: ?Sized> Weak<T> {
} else {
unsafe {
inner.inc_strong();
- Some(Rc::from_inner(self.ptr))
+ Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
}
}
}
@@ -2437,15 +3052,15 @@ impl<T: ?Sized> Weak<T> {
#[must_use]
#[stable(feature = "weak_counts", since = "1.41.0")]
pub fn weak_count(&self) -> usize {
- self.inner()
- .map(|inner| {
- if inner.strong() > 0 {
- inner.weak() - 1 // subtract the implicit weak ptr
- } else {
- 0
- }
- })
- .unwrap_or(0)
+ if let Some(inner) = self.inner() {
+ if inner.strong() > 0 {
+ inner.weak() - 1 // subtract the implicit weak ptr
+ } else {
+ 0
+ }
+ } else {
+ 0
+ }
}
/// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
@@ -2513,7 +3128,7 @@ impl<T: ?Sized> Weak<T> {
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
/// Drops the `Weak` pointer.
///
/// # Examples
@@ -2546,14 +3161,14 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
// the strong pointers have disappeared.
if inner.weak() == 0 {
unsafe {
- Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
+ self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
}
}
}
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> Clone for Weak<T> {
+impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
/// Makes a clone of the `Weak` pointer that points to the same allocation.
///
/// # Examples
@@ -2566,16 +3181,16 @@ impl<T: ?Sized> Clone for Weak<T> {
/// let _ = Weak::clone(&weak_five);
/// ```
#[inline]
- fn clone(&self) -> Weak<T> {
+ fn clone(&self) -> Weak<T, A> {
if let Some(inner) = self.inner() {
inner.inc_weak()
}
- Weak { ptr: self.ptr }
+ Weak { ptr: self.ptr, alloc: self.alloc.clone() }
}
}
#[stable(feature = "rc_weak", since = "1.4.0")]
-impl<T: ?Sized> fmt::Debug for Weak<T> {
+impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(Weak)")
}
@@ -2707,21 +3322,21 @@ impl<'a> RcInnerPtr for WeakInner<'a> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
+impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
fn borrow(&self) -> &T {
&**self
}
}
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsRef<T> for Rc<T> {
+impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
fn as_ref(&self) -> &T {
&**self
}
}
#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Rc<T> {}
+impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
/// Get the offset within an `RcBox` for the payload behind a pointer.
///
@@ -2822,7 +3437,7 @@ impl<T> UniqueRc<T> {
unsafe {
this.ptr.as_ref().inc_weak();
}
- Weak { ptr: this.ptr }
+ Weak { ptr: this.ptr, alloc: Global }
}
/// Converts the `UniqueRc` into a regular [`Rc`]
diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs
index 849774099..38f9f39fb 100644
--- a/library/alloc/src/str.rs
+++ b/library/alloc/src/str.rs
@@ -223,8 +223,6 @@ impl str {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = "this is a string";
/// let boxed_str = s.to_owned().into_boxed_str();
@@ -487,8 +485,6 @@ impl str {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let string = String::from("birthday gift");
/// let boxed_str = string.clone().into_boxed_str();
@@ -602,8 +598,6 @@ impl str {
///
/// # Examples
///
-/// Basic usage:
-///
/// ```
/// let smile_utf8 = Box::new([226, 152, 186]);
/// let smile = unsafe { std::str::from_boxed_utf8_unchecked(smile_utf8) };
@@ -618,7 +612,7 @@ pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
}
/// Converts the bytes while the bytes are still ascii.
-/// For better average performance, this is happens in chunks of `2*size_of::<usize>()`.
+/// For better average performance, this happens in chunks of `2*size_of::<usize>()`.
/// Returns a vec with the converted bytes.
#[inline]
#[cfg(not(test))]
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index ad7b77f54..ed43244eb 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -388,8 +388,6 @@ pub struct String {
///
/// # Examples
///
-/// Basic usage:
-///
/// ```
/// // some invalid bytes, in a vector
/// let bytes = vec![0, 159];
@@ -412,9 +410,8 @@ pub struct FromUtf8Error {
/// This type is the error type for the [`from_utf16`] method on [`String`].
///
/// [`from_utf16`]: String::from_utf16
-/// # Examples
///
-/// Basic usage:
+/// # Examples
///
/// ```
/// // 𝄞mu<invalid>ic
@@ -441,8 +438,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = String::new();
/// ```
@@ -472,8 +467,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::with_capacity(10);
///
@@ -661,8 +654,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// // 𝄞music
/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
@@ -704,8 +695,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// // 𝄞mus<invalid>ic<invalid>
/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
@@ -784,8 +773,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// use std::mem;
///
@@ -827,8 +814,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// // some bytes, in a vector
/// let sparkle_heart = vec![240, 159, 146, 150];
@@ -852,8 +837,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = String::from("hello");
/// let bytes = s.into_bytes();
@@ -871,8 +854,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = String::from("foo");
///
@@ -889,8 +870,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("foobar");
/// let s_mut_str = s.as_mut_str();
@@ -910,8 +889,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("foo");
///
@@ -966,8 +943,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = String::with_capacity(10);
///
@@ -1157,8 +1132,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("foo");
///
@@ -1206,8 +1179,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("abc");
///
@@ -1235,8 +1206,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = String::from("hello");
///
@@ -1263,8 +1232,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("hello");
///
@@ -1287,14 +1254,12 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
- /// let mut s = String::from("foo");
+ /// let mut s = String::from("abč");
///
- /// assert_eq!(s.pop(), Some('o'));
- /// assert_eq!(s.pop(), Some('o'));
- /// assert_eq!(s.pop(), Some('f'));
+ /// assert_eq!(s.pop(), Some('č'));
+ /// assert_eq!(s.pop(), Some('b'));
+ /// assert_eq!(s.pop(), Some('a'));
///
/// assert_eq!(s.pop(), None);
/// ```
@@ -1321,14 +1286,12 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
- /// let mut s = String::from("foo");
+ /// let mut s = String::from("abç");
///
- /// assert_eq!(s.remove(0), 'f');
- /// assert_eq!(s.remove(1), 'o');
- /// assert_eq!(s.remove(0), 'o');
+ /// assert_eq!(s.remove(0), 'a');
+ /// assert_eq!(s.remove(1), 'ç');
+ /// assert_eq!(s.remove(0), 'b');
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
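The new `pop`/`remove` examples use multi-byte characters to make the byte-oriented behavior visible: `pop` removes whole `char`s from the end, while `remove` takes a byte index and panics if it is not on a `char` boundary. A small sketch of the boundary rule (not part of the patch):

```
fn main() {
    let mut s = String::from("abç"); // 'ç' occupies 2 bytes in UTF-8
    assert_eq!(s.len(), 4);          // byte length, not character count

    assert_eq!(s.remove(2), 'ç');    // byte index 2 is a char boundary
    assert_eq!(s, "ab");
    // `s.remove(3)` on the original string would panic: index 3 falls inside 'ç'.
}
```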
@@ -1514,8 +1477,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::with_capacity(3);
///
@@ -1563,8 +1524,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("bar");
///
@@ -1595,8 +1554,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("hello");
///
@@ -1620,8 +1577,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let a = String::from("foo");
/// assert_eq!(a.len(), 3);
@@ -1641,8 +1596,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut v = String::new();
/// assert!(v.is_empty());
@@ -1697,8 +1650,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("foo");
///
@@ -1734,8 +1685,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("α is alpha, β is beta");
/// let beta_offset = s.find('β').unwrap_or(s.len());
@@ -1784,8 +1733,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let mut s = String::from("α is alpha, β is beta");
/// let beta_offset = s.find('β').unwrap_or(s.len());
@@ -1834,8 +1781,6 @@ impl String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s = String::from("hello");
///
@@ -1866,8 +1811,6 @@ impl String {
///
/// # Examples
///
- /// Simple usage:
- ///
/// ```
/// let x = String::from("bucket");
/// let static_ref: &'static mut str = x.leak();
@@ -1886,8 +1829,6 @@ impl FromUtf8Error {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// // some invalid bytes, in a vector
/// let bytes = vec![0, 159];
@@ -1910,8 +1851,6 @@ impl FromUtf8Error {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// // some invalid bytes, in a vector
/// let bytes = vec![0, 159];
@@ -1938,8 +1877,6 @@ impl FromUtf8Error {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// // some invalid bytes, in a vector
/// let bytes = vec![0, 159];
@@ -2490,8 +2427,6 @@ pub trait ToString {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let i = 5;
/// let five = String::from("5");
@@ -2527,6 +2462,7 @@ impl<T: fmt::Display + ?Sized> ToString for T {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "ascii_char", issue = "110998")]
impl ToString for core::ascii::Char {
@@ -2536,6 +2472,7 @@ impl ToString for core::ascii::Char {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "char_to_string_specialization", since = "1.46.0")]
impl ToString for char {
@@ -2545,6 +2482,7 @@ impl ToString for char {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "bool_to_string_specialization", since = "1.68.0")]
impl ToString for bool {
@@ -2554,6 +2492,7 @@ impl ToString for bool {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "u8_to_string_specialization", since = "1.54.0")]
impl ToString for u8 {
@@ -2574,6 +2513,7 @@ impl ToString for u8 {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "i8_to_string_specialization", since = "1.54.0")]
impl ToString for i8 {
@@ -2597,6 +2537,7 @@ impl ToString for i8 {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "str_to_string_specialization", since = "1.9.0")]
impl ToString for str {
@@ -2606,6 +2547,7 @@ impl ToString for str {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "cow_str_to_string_specialization", since = "1.17.0")]
impl ToString for Cow<'_, str> {
@@ -2615,6 +2557,7 @@ impl ToString for Cow<'_, str> {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "string_to_string_specialization", since = "1.17.0")]
impl ToString for String {
@@ -2624,6 +2567,7 @@ impl ToString for String {
}
}
+#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "fmt_arguments_to_string_specialization", since = "1.71.0")]
impl ToString for fmt::Arguments<'_> {
@@ -2702,8 +2646,6 @@ impl From<Box<str>> for String {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s1: String = String::from("hello world");
/// let s2: Box<str> = s1.into_boxed_str();
@@ -2723,8 +2665,6 @@ impl From<String> for Box<str> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s1: String = String::from("hello world");
/// let s2: Box<str> = Box::from(s1);
@@ -2857,8 +2797,6 @@ impl From<String> for Vec<u8> {
///
/// # Examples
///
- /// Basic usage:
- ///
/// ```
/// let s1 = String::from("hello world");
/// let v1 = Vec::from(s1);
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 5bb1a93ae..d3b755844 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -153,7 +153,7 @@ macro_rules! acquire {
///
/// ## `Deref` behavior
///
-/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
+/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
/// clashes with `T`'s methods, the methods of `Arc<T>` itself are associated
/// functions, called using [fully qualified syntax]:
@@ -187,7 +187,6 @@ macro_rules! acquire {
/// [mutex]: ../../std/sync/struct.Mutex.html
/// [rwlock]: ../../std/sync/struct.RwLock.html
/// [atomic]: core::sync::atomic
-/// [deref]: core::ops::Deref
/// [downgrade]: Arc::downgrade
/// [upgrade]: Weak::upgrade
/// [RefCell\<T>]: core::cell::RefCell
@@ -246,32 +245,48 @@ macro_rules! acquire {
/// [rc_examples]: crate::rc#examples
#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Arc<T: ?Sized> {
+pub struct Arc<
+ T: ?Sized,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
ptr: NonNull<ArcInner<T>>,
phantom: PhantomData<ArcInner<T>>,
+ alloc: A,
}
#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
+unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Send> Send for Arc<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
+unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Sync> Sync for Arc<T, A> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
-impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Arc<T> {}
+impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Arc<T, A> {}
#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Arc<U, A>> for Arc<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
impl<T: ?Sized> Arc<T> {
unsafe fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
- Self { ptr, phantom: PhantomData }
+ unsafe { Self::from_inner_in(ptr, Global) }
}
unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
- unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+ unsafe { Self::from_ptr_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Arc<T, A> {
+ #[inline]
+ unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
+ Self { ptr, phantom: PhantomData, alloc }
+ }
+
+ #[inline]
+ unsafe fn from_ptr_in(ptr: *mut ArcInner<T>, alloc: A) -> Self {
+ unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
}
}
@@ -296,7 +311,10 @@ impl<T: ?Sized> Arc<T> {
///
/// [`upgrade`]: Weak::upgrade
#[stable(feature = "arc_weak", since = "1.4.0")]
-pub struct Weak<T: ?Sized> {
+pub struct Weak<
+ T: ?Sized,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
// This is a `NonNull` to allow optimizing the size of this type in enums,
// but it is not necessarily a valid pointer.
// `Weak::new` sets this to `usize::MAX` so that it doesn’t need
@@ -304,15 +322,16 @@ pub struct Weak<T: ?Sized> {
// will ever have because RcBox has alignment at least 2.
// This is only possible when `T: Sized`; unsized `T` never dangle.
ptr: NonNull<ArcInner<T>>,
+ alloc: A,
}
#[stable(feature = "arc_weak", since = "1.4.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
+unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Send> Send for Weak<T, A> {}
#[stable(feature = "arc_weak", since = "1.4.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
+unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Sync> Sync for Weak<T, A> {}
#[unstable(feature = "coerce_unsized", issue = "18598")]
-impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
@@ -442,7 +461,7 @@ impl<T> Arc<T> {
.into();
let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
- let weak = Weak { ptr: init_ptr };
+ let weak = Weak { ptr: init_ptr, alloc: Global };
// It's important we don't give up ownership of the weak pointer, or
// else the memory might be freed by the time `data_fn` returns. If
@@ -510,7 +529,7 @@ impl<T> Arc<T> {
Arc::from_ptr(Arc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
- |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
))
}
}
@@ -544,7 +563,7 @@ impl<T> Arc<T> {
Arc::from_ptr(Arc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate_zeroed(layout),
- |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
))
}
}
@@ -617,7 +636,7 @@ impl<T> Arc<T> {
Ok(Arc::from_ptr(Arc::try_allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
- |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
)?))
}
}
@@ -650,10 +669,258 @@ impl<T> Arc<T> {
Ok(Arc::from_ptr(Arc::try_allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate_zeroed(layout),
- |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
+ <*mut u8>::cast,
)?))
}
}
+}
+
+impl<T, A: Allocator> Arc<T, A> {
+ /// Returns a reference to the underlying allocator.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Arc::allocator(&a)` instead of `a.allocator()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(this: &Self) -> &A {
+ &this.alloc
+ }
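`Arc::allocator` is deliberately an associated function so it cannot shadow a method named `allocator` on the pointee. A hedged usage sketch (nightly, `allocator_api` assumed):

```
#![feature(allocator_api)]

use std::alloc::System;
use std::sync::Arc;

fn main() {
    let five = Arc::new_in(5, System);
    // Called as an associated function to avoid clashing with methods on the inner type.
    let _alloc: &System = Arc::allocator(&five);
}
```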
+ /// Constructs a new `Arc<T>` in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let five = Arc::new_in(5, System);
+ /// ```
+ #[inline]
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn new_in(data: T, alloc: A) -> Arc<T, A> {
+ // Start the weak pointer count as 1 which is the weak pointer that's
+ // held by all the strong pointers (kinda), see std/rc.rs for more info
+ let x = Box::new_in(
+ ArcInner {
+ strong: atomic::AtomicUsize::new(1),
+ weak: atomic::AtomicUsize::new(1),
+ data,
+ },
+ alloc,
+ );
+ let (ptr, alloc) = Box::into_unique(x);
+ unsafe { Self::from_inner_in(ptr.into(), alloc) }
+ }
+
+ /// Constructs a new `Arc` with uninitialized contents in the provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let mut five = Arc::<u32, _>::new_uninit_in(System);
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_uninit_in(alloc: A) -> Arc<mem::MaybeUninit<T>, A> {
+ unsafe {
+ Arc::from_ptr_in(
+ Arc::allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate(layout),
+ <*mut u8>::cast,
+ ),
+ alloc,
+ )
+ }
+ }
+
+ /// Constructs a new `Arc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
+ /// of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let zero = Arc::<u32, _>::new_zeroed_in(System);
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0)
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_zeroed_in(alloc: A) -> Arc<mem::MaybeUninit<T>, A> {
+ unsafe {
+ Arc::from_ptr_in(
+ Arc::allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate_zeroed(layout),
+ <*mut u8>::cast,
+ ),
+ alloc,
+ )
+ }
+ }
+
+ /// Constructs a new `Pin<Arc<T, A>>` in the provided allocator. If `T` does not implement `Unpin`,
+ /// then `data` will be pinned in memory and unable to be moved.
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn pin_in(data: T, alloc: A) -> Pin<Arc<T, A>> {
+ unsafe { Pin::new_unchecked(Arc::new_in(data, alloc)) }
+ }
+
+ /// Constructs a new `Pin<Arc<T, A>>` in the provided allocator, returning an error if allocation
+ /// fails.
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn try_pin_in(data: T, alloc: A) -> Result<Pin<Arc<T, A>>, AllocError> {
+ unsafe { Ok(Pin::new_unchecked(Arc::try_new_in(data, alloc)?)) }
+ }
+
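Neither `pin_in` nor `try_pin_in` carries a doc example yet; a minimal sketch of the intended use (nightly, `allocator_api` assumed), mirroring `Arc::pin`:

```
#![feature(allocator_api)]

use std::alloc::{AllocError, System};
use std::pin::Pin;
use std::sync::Arc;

fn main() -> Result<(), AllocError> {
    // Infallible version (allocation failure goes to the global error handler).
    let pinned: Pin<Arc<i32, System>> = Arc::pin_in(7, System);
    assert_eq!(*pinned, 7);

    // Fallible version: surfaces allocation failure as an error instead.
    let fallible: Pin<Arc<i32, System>> = Arc::try_pin_in(8, System)?;
    assert_eq!(*fallible, 8);
    Ok(())
}
```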
+ /// Constructs a new `Arc<T, A>` in the provided allocator, returning an error if allocation fails.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let five = Arc::try_new_in(5, System)?;
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn try_new_in(data: T, alloc: A) -> Result<Arc<T, A>, AllocError> {
+ // Start the weak pointer count as 1 which is the weak pointer that's
+ // held by all the strong pointers (kinda), see std/rc.rs for more info
+ let x = Box::try_new_in(
+ ArcInner {
+ strong: atomic::AtomicUsize::new(1),
+ weak: atomic::AtomicUsize::new(1),
+ data,
+ },
+ alloc,
+ )?;
+ let (ptr, alloc) = Box::into_unique(x);
+ Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
+ }
+
+ /// Constructs a new `Arc` with uninitialized contents, in the provided allocator, returning an
+ /// error if allocation fails.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit, allocator_api)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let mut five = Arc::<u32, _>::try_new_uninit_in(System)?;
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn try_new_uninit_in(alloc: A) -> Result<Arc<mem::MaybeUninit<T>, A>, AllocError> {
+ unsafe {
+ Ok(Arc::from_ptr_in(
+ Arc::try_allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate(layout),
+ <*mut u8>::cast,
+ )?,
+ alloc,
+ ))
+ }
+ }
+
+ /// Constructs a new `Arc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes, in the provided allocator, returning an error if allocation
+ /// fails.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
+ /// of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit, allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let zero = Arc::<u32, _>::try_new_zeroed_in(System)?;
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0);
+ /// # Ok::<(), std::alloc::AllocError>(())
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ // #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn try_new_zeroed_in(alloc: A) -> Result<Arc<mem::MaybeUninit<T>, A>, AllocError> {
+ unsafe {
+ Ok(Arc::from_ptr_in(
+ Arc::try_allocate_for_layout(
+ Layout::new::<T>(),
+ |layout| alloc.allocate_zeroed(layout),
+ <*mut u8>::cast,
+ )?,
+ alloc,
+ ))
+ }
+ }
/// Returns the inner value, if the `Arc` has exactly one strong reference.
///
/// Otherwise, an [`Err`] is returned with the same `Arc` that was
@@ -695,9 +962,10 @@ impl<T> Arc<T> {
unsafe {
let elem = ptr::read(&this.ptr.as_ref().data);
+ let alloc = ptr::read(&this.alloc); // copy the allocator
// Make a weak pointer to clean up the implicit strong-weak reference
- let _weak = Weak { ptr: this.ptr };
+ let _weak = Weak { ptr: this.ptr, alloc };
mem::forget(this);
Ok(elem)
@@ -814,9 +1082,11 @@ impl<T> Arc<T> {
// in `drop_slow`. Instead of dropping the value behind the pointer,
// it is read and eventually returned; `ptr::read` has the same
// safety conditions as `ptr::drop_in_place`.
+
let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) };
+ let alloc = unsafe { ptr::read(&this.alloc) };
- drop(Weak { ptr: this.ptr });
+ drop(Weak { ptr: this.ptr, alloc });
Some(inner)
}
@@ -891,7 +1161,83 @@ impl<T> Arc<[T]> {
}
}
-impl<T> Arc<mem::MaybeUninit<T>> {
+impl<T, A: Allocator> Arc<[T], A> {
+ /// Constructs a new atomically reference-counted slice with uninitialized contents in the
+ /// provided allocator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let mut values = Arc::<[u32], _>::new_uninit_slice_in(3, System);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_uninit_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit<T>], A> {
+ unsafe { Arc::from_ptr_in(Arc::allocate_for_slice_in(len, &alloc), alloc) }
+ }
+
+ /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being
+ /// filled with `0` bytes, in the provided allocator.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Arc;
+ /// use std::alloc::System;
+ ///
+ /// let values = Arc::<[u32], _>::new_zeroed_slice_in(3, System);
+ /// let values = unsafe { values.assume_init() };
+ ///
+ /// assert_eq!(*values, [0, 0, 0])
+ /// ```
+ ///
+ /// [zeroed]: mem::MaybeUninit::zeroed
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit<T>], A> {
+ unsafe {
+ Arc::from_ptr_in(
+ Arc::allocate_for_layout(
+ Layout::array::<T>(len).unwrap(),
+ |layout| alloc.allocate_zeroed(layout),
+ |mem| {
+ ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
+ as *mut ArcInner<[mem::MaybeUninit<T>]>
+ },
+ ),
+ alloc,
+ )
+ }
+ }
+}
+
+impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
/// Converts to `Arc<T>`.
///
/// # Safety
@@ -924,12 +1270,16 @@ impl<T> Arc<mem::MaybeUninit<T>> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[must_use = "`self` will be dropped if the result is not used"]
#[inline]
- pub unsafe fn assume_init(self) -> Arc<T> {
- unsafe { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
+ pub unsafe fn assume_init(self) -> Arc<T, A>
+ where
+ A: Clone,
+ {
+ let md_self = mem::ManuallyDrop::new(self);
+ unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
}
}
-impl<T> Arc<[mem::MaybeUninit<T>]> {
+impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
/// Converts to `Arc<[T]>`.
///
/// # Safety
@@ -965,12 +1315,129 @@ impl<T> Arc<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[must_use = "`self` will be dropped if the result is not used"]
#[inline]
- pub unsafe fn assume_init(self) -> Arc<[T]> {
- unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+ pub unsafe fn assume_init(self) -> Arc<[T], A>
+ where
+ A: Clone,
+ {
+ let md_self = mem::ManuallyDrop::new(self);
+ unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
}
}
impl<T: ?Sized> Arc<T> {
+ /// Constructs an `Arc<T>` from a raw pointer.
+ ///
+ /// The raw pointer must have been previously returned by a call to
+ /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
+ /// alignment as `T`. This is trivially true if `U` is `T`.
+ /// Note that if `U` is not `T` but has the same size and alignment, this is
+ /// basically like transmuting references of different types. See
+ /// [`mem::transmute`][transmute] for more information on what
+ /// restrictions apply in this case.
+ ///
+ /// The user of `from_raw` has to make sure a specific value of `T` is only
+ /// dropped once.
+ ///
+ /// This function is unsafe because improper use may lead to memory unsafety,
+ /// even if the returned `Arc<T>` is never accessed.
+ ///
+ /// [into_raw]: Arc::into_raw
+ /// [transmute]: core::mem::transmute
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new("hello".to_owned());
+ /// let x_ptr = Arc::into_raw(x);
+ ///
+ /// unsafe {
+ /// // Convert back to an `Arc` to prevent leak.
+ /// let x = Arc::from_raw(x_ptr);
+ /// assert_eq!(&*x, "hello");
+ ///
+ /// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
+ /// }
+ ///
+ /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe { Arc::from_raw_in(ptr, Global) }
+ }
+
+ /// Increments the strong reference count on the `Arc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// associated `Arc` instance must be valid (i.e. the strong count must be at
+ /// least 1) for the duration of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Arc::into_raw(five);
+ /// Arc::increment_strong_count(ptr);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// let five = Arc::from_raw(ptr);
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")]
+ pub unsafe fn increment_strong_count(ptr: *const T) {
+ unsafe { Arc::increment_strong_count_in(ptr, Global) }
+ }
+
+ /// Decrements the strong reference count on the `Arc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// associated `Arc` instance must be valid (i.e. the strong count must be at
+ /// least 1) when invoking this method. This method can be used to release the final
+ /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been
+ /// released.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Arc::into_raw(five);
+ /// Arc::increment_strong_count(ptr);
+ ///
+ /// // Those assertions are deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// let five = Arc::from_raw(ptr);
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// Arc::decrement_strong_count(ptr);
+ /// assert_eq!(1, Arc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")]
+ pub unsafe fn decrement_strong_count(ptr: *const T) {
+ unsafe { Arc::decrement_strong_count_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// Consumes the `Arc`, returning the wrapped pointer.
///
/// To avoid a memory leak the pointer must be converted back to an `Arc` using
@@ -1020,16 +1487,18 @@ impl<T: ?Sized> Arc<T> {
unsafe { ptr::addr_of_mut!((*ptr).data) }
}
- /// Constructs an `Arc<T>` from a raw pointer.
+ /// Constructs an `Arc<T, A>` from a raw pointer.
///
/// The raw pointer must have been previously returned by a call to
- /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
+ /// [`Arc<U, A>::into_raw`][into_raw] where `U` must have the same size and
/// alignment as `T`. This is trivially true if `U` is `T`.
/// Note that if `U` is not `T` but has the same size and alignment, this is
/// basically like transmuting references of different types. See
- /// [`mem::transmute`][transmute] for more information on what
+ /// [`mem::transmute`] for more information on what
/// restrictions apply in this case.
///
+ /// The raw pointer must point to a block of memory allocated by `alloc`.
+ ///
/// The user of `from_raw` has to make sure a specific value of `T` is only
/// dropped once.
///
@@ -1037,19 +1506,21 @@ impl<T: ?Sized> Arc<T> {
/// even if the returned `Arc<T>` is never accessed.
///
/// [into_raw]: Arc::into_raw
- /// [transmute]: core::mem::transmute
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::sync::Arc;
+ /// use std::alloc::System;
///
- /// let x = Arc::new("hello".to_owned());
+ /// let x = Arc::new_in("hello".to_owned(), System);
/// let x_ptr = Arc::into_raw(x);
///
/// unsafe {
/// // Convert back to an `Arc` to prevent leak.
- /// let x = Arc::from_raw(x_ptr);
+ /// let x = Arc::from_raw_in(x_ptr, System);
/// assert_eq!(&*x, "hello");
///
/// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
@@ -1057,15 +1528,16 @@ impl<T: ?Sized> Arc<T> {
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
- #[stable(feature = "rc_raw", since = "1.17.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
unsafe {
let offset = data_offset(ptr);
// Reverse the offset to find the original ArcInner.
let arc_ptr = ptr.byte_sub(offset) as *mut ArcInner<T>;
- Self::from_ptr(arc_ptr)
+ Self::from_ptr_in(arc_ptr, alloc)
}
}
@@ -1083,7 +1555,10 @@ impl<T: ?Sized> Arc<T> {
#[must_use = "this returns a new `Weak` pointer, \
without modifying the original `Arc`"]
#[stable(feature = "arc_weak", since = "1.4.0")]
- pub fn downgrade(this: &Self) -> Weak<T> {
+ pub fn downgrade(this: &Self) -> Weak<T, A>
+ where
+ A: Clone,
+ {
// This Relaxed is OK because we're checking the value in the CAS
// below.
let mut cur = this.inner().weak.load(Relaxed);
@@ -1110,7 +1585,7 @@ impl<T: ?Sized> Arc<T> {
Ok(_) => {
// Make sure we do not create a dangling Weak
debug_assert!(!is_dangling(this.ptr.as_ptr()));
- return Weak { ptr: this.ptr };
+ return Weak { ptr: this.ptr, alloc: this.alloc.clone() };
}
Err(old) => cur = old,
}
@@ -1181,30 +1656,37 @@ impl<T: ?Sized> Arc<T> {
///
/// The pointer must have been obtained through `Arc::into_raw`, and the
/// associated `Arc` instance must be valid (i.e. the strong count must be at
- /// least 1) for the duration of this method.
+ /// least 1) for the duration of this method, and `ptr` must point to a block of memory
+ /// allocated by `alloc`.
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::sync::Arc;
+ /// use std::alloc::System;
///
- /// let five = Arc::new(5);
+ /// let five = Arc::new_in(5, System);
///
/// unsafe {
/// let ptr = Arc::into_raw(five);
- /// Arc::increment_strong_count(ptr);
+ /// Arc::increment_strong_count_in(ptr, System);
///
/// // This assertion is deterministic because we haven't shared
/// // the `Arc` between threads.
- /// let five = Arc::from_raw(ptr);
+ /// let five = Arc::from_raw_in(ptr, System);
/// assert_eq!(2, Arc::strong_count(&five));
/// }
/// ```
#[inline]
- #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")]
- pub unsafe fn increment_strong_count(ptr: *const T) {
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
+ where
+ A: Clone,
+ {
// Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
- let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
+ let arc = unsafe { mem::ManuallyDrop::new(Arc::from_raw_in(ptr, alloc)) };
// Now increase refcount, but don't drop new refcount either
let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
}
@@ -1214,35 +1696,39 @@ impl<T: ?Sized> Arc<T> {
///
/// # Safety
///
- /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// The pointer must have been obtained through `Arc::into_raw`, the
/// associated `Arc` instance must be valid (i.e. the strong count must be at
- /// least 1) when invoking this method. This method can be used to release the final
+ /// least 1) when invoking this method, and `ptr` must point to a block of memory
+ /// allocated by `alloc`. This method can be used to release the final
/// `Arc` and backing storage, but **should not** be called after the final `Arc` has been
/// released.
///
/// # Examples
///
/// ```
+ /// #![feature(allocator_api)]
+ ///
/// use std::sync::Arc;
+ /// use std::alloc::System;
///
- /// let five = Arc::new(5);
+ /// let five = Arc::new_in(5, System);
///
/// unsafe {
/// let ptr = Arc::into_raw(five);
- /// Arc::increment_strong_count(ptr);
+ /// Arc::increment_strong_count_in(ptr, System);
///
/// // Those assertions are deterministic because we haven't shared
/// // the `Arc` between threads.
- /// let five = Arc::from_raw(ptr);
+ /// let five = Arc::from_raw_in(ptr, System);
/// assert_eq!(2, Arc::strong_count(&five));
- /// Arc::decrement_strong_count(ptr);
+ /// Arc::decrement_strong_count_in(ptr, System);
/// assert_eq!(1, Arc::strong_count(&five));
/// }
/// ```
#[inline]
- #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")]
- pub unsafe fn decrement_strong_count(ptr: *const T) {
- unsafe { drop(Arc::from_raw(ptr)) };
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
+ unsafe { drop(Arc::from_raw_in(ptr, alloc)) };
}
#[inline]
@@ -1263,7 +1749,10 @@ impl<T: ?Sized> Arc<T> {
unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
// Drop the weak ref collectively held by all strong references
- drop(Weak { ptr: self.ptr });
+ // Take a reference to `self.alloc` instead of cloning because 1. it'll
+ // last long enough, and 2. you should be able to drop `Arc`s with
+ // unclonable allocators
+ drop(Weak { ptr: self.ptr, alloc: &self.alloc });
}
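The comment above is why `Drop` only needs `A: Allocator` and not `A: Clone`: the temporary `Weak` borrows the allocator, so even an uncloneable allocator can be used to create and drop an `Arc`. A hedged sketch with a deliberately non-`Clone` allocator wrapping `Global` (hypothetical type, nightly, `allocator_api` assumed):

```
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
use std::sync::Arc;

// A hypothetical allocator that forwards to `Global` but is not `Clone`.
struct NotClone(Global);

unsafe impl Allocator for NotClone {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.0.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        self.0.deallocate(ptr, layout)
    }
}

fn main() {
    // Creating and dropping works without `A: Clone`; only operations such as
    // `Arc::clone` and `Arc::downgrade` require the allocator to be cloneable.
    let arc = Arc::new_in([0u8; 16], NotClone(Global));
    drop(arc);
}
```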
/// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to
@@ -1345,25 +1834,28 @@ impl<T: ?Sized> Arc<T> {
inner
}
+}
+impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
+ #[inline]
#[cfg(not(no_global_oom_handling))]
- unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
+ unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut ArcInner<T> {
// Allocate for the `ArcInner<T>` using the given value.
unsafe {
- Self::allocate_for_layout(
+ Arc::allocate_for_layout(
Layout::for_value(&*ptr),
- |layout| Global.allocate(layout),
+ |layout| alloc.allocate(layout),
|mem| mem.with_metadata_of(ptr as *const ArcInner<T>),
)
}
}
#[cfg(not(no_global_oom_handling))]
- fn from_box(src: Box<T>) -> Arc<T> {
+ fn from_box_in(src: Box<T, A>) -> Arc<T, A> {
unsafe {
let value_size = size_of_val(&*src);
- let ptr = Self::allocate_for_ptr(&*src);
+ let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
// Copy value as bytes
ptr::copy_nonoverlapping(
@@ -1373,10 +1865,11 @@ impl<T: ?Sized> Arc<T> {
);
// Free the allocation without dropping its contents
- let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>);
+ let (bptr, alloc) = Box::into_raw_with_allocator(src);
+ let src = Box::from_raw(bptr as *mut mem::ManuallyDrop<T>);
drop(src);
- Self::from_ptr(ptr)
+ Self::from_ptr_in(ptr, alloc)
}
}
}
@@ -1389,7 +1882,7 @@ impl<T> Arc<[T]> {
Self::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.allocate(layout),
- |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
+ |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut ArcInner<[T]>,
)
}
}
@@ -1458,6 +1951,21 @@ impl<T> Arc<[T]> {
}
}
+impl<T, A: Allocator> Arc<[T], A> {
+ /// Allocates an `ArcInner<[T]>` with the given length.
+ #[inline]
+ #[cfg(not(no_global_oom_handling))]
+ unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut ArcInner<[T]> {
+ unsafe {
+ Arc::allocate_for_layout(
+ Layout::array::<T>(len).unwrap(),
+ |layout| alloc.allocate(layout),
+ |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut ArcInner<[T]>,
+ )
+ }
+ }
+}
+
/// Specialization trait used for `From<&[T]>`.
#[cfg(not(no_global_oom_handling))]
trait ArcFromSlice<T> {
@@ -1481,7 +1989,7 @@ impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Clone for Arc<T> {
+impl<T: ?Sized, A: Allocator + Clone> Clone for Arc<T, A> {
/// Makes a clone of the `Arc` pointer.
///
/// This creates another pointer to the same allocation, increasing the
@@ -1497,7 +2005,7 @@ impl<T: ?Sized> Clone for Arc<T> {
/// let _ = Arc::clone(&five);
/// ```
#[inline]
- fn clone(&self) -> Arc<T> {
+ fn clone(&self) -> Arc<T, A> {
// Using a relaxed ordering is alright here, as knowledge of the
// original reference prevents other threads from erroneously deleting
// the object.
@@ -1530,12 +2038,12 @@ impl<T: ?Sized> Clone for Arc<T> {
abort();
}
- unsafe { Self::from_inner(self.ptr) }
+ unsafe { Self::from_inner_in(self.ptr, self.alloc.clone()) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> Deref for Arc<T> {
+impl<T: ?Sized, A: Allocator> Deref for Arc<T, A> {
type Target = T;
#[inline]
@@ -1547,7 +2055,7 @@ impl<T: ?Sized> Deref for Arc<T> {
#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Arc<T> {}
-impl<T: Clone> Arc<T> {
+impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
/// Makes a mutable reference into the given `Arc`.
///
/// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -1613,7 +2121,7 @@ impl<T: Clone> Arc<T> {
if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
// Another strong pointer exists, so we must clone.
// Pre-allocate memory to allow writing the cloned value directly.
- let mut arc = Self::new_uninit();
+ let mut arc = Self::new_uninit_in(this.alloc.clone());
unsafe {
let data = Arc::get_mut_unchecked(&mut arc);
(**this).write_clone_into_raw(data.as_mut_ptr());
@@ -1634,10 +2142,10 @@ impl<T: Clone> Arc<T> {
// Materialize our own implicit weak pointer, so that it can clean
// up the ArcInner as needed.
- let _weak = Weak { ptr: this.ptr };
+ let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
// Can just steal the data, all that's left is Weaks
- let mut arc = Self::new_uninit();
+ let mut arc = Self::new_uninit_in(this.alloc.clone());
unsafe {
let data = Arc::get_mut_unchecked(&mut arc);
data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
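The `make_mut` hunks above only thread the allocator through (`Self::new_uninit_in(this.alloc.clone())` instead of `Self::new_uninit()`, and the materialized `Weak` now carries `alloc`); the clone-on-write behavior itself is unchanged, as in this standard usage sketch:
```
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(5);

    *Arc::make_mut(&mut data) += 1;          // sole owner: mutates in place
    let mut other_data = Arc::clone(&data);  // does not clone the inner value
    *Arc::make_mut(&mut data) += 1;          // shared: clones the inner value first
    *Arc::make_mut(&mut data) += 1;          // unique again: mutates in place
    *Arc::make_mut(&mut other_data) *= 2;    // unique: mutates in place

    // `data` and `other_data` now point to different allocations.
    assert_eq!(*data, 8);
    assert_eq!(*other_data, 12);
}
```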
@@ -1690,7 +2198,7 @@ impl<T: Clone> Arc<T> {
}
}
-impl<T: ?Sized> Arc<T> {
+impl<T: ?Sized, A: Allocator> Arc<T, A> {
/// Returns a mutable reference into the given `Arc`, if there are
/// no other `Arc` or [`Weak`] pointers to the same allocation.
///
@@ -1828,7 +2336,7 @@ impl<T: ?Sized> Arc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> {
/// Drops the `Arc`.
///
/// This will decrement the strong reference count. If the strong reference
@@ -1899,7 +2407,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
}
}
-impl Arc<dyn Any + Send + Sync> {
+impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
/// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
///
/// # Examples
@@ -1920,15 +2428,16 @@ impl Arc<dyn Any + Send + Sync> {
/// ```
#[inline]
#[stable(feature = "rc_downcast", since = "1.29.0")]
- pub fn downcast<T>(self) -> Result<Arc<T>, Self>
+ pub fn downcast<T>(self) -> Result<Arc<T, A>, Self>
where
T: Any + Send + Sync,
{
if (*self).is::<T>() {
unsafe {
let ptr = self.ptr.cast::<ArcInner<T>>();
+ let alloc = self.alloc.clone();
mem::forget(self);
- Ok(Arc::from_inner(ptr))
+ Ok(Arc::from_inner_in(ptr, alloc))
}
} else {
Err(self)
@@ -1963,14 +2472,15 @@ impl Arc<dyn Any + Send + Sync> {
/// [`downcast`]: Self::downcast
#[inline]
#[unstable(feature = "downcast_unchecked", issue = "90850")]
- pub unsafe fn downcast_unchecked<T>(self) -> Arc<T>
+ pub unsafe fn downcast_unchecked<T>(self) -> Arc<T, A>
where
T: Any + Send + Sync,
{
unsafe {
let ptr = self.ptr.cast::<ArcInner<T>>();
+ let alloc = self.alloc.clone();
mem::forget(self);
- Arc::from_inner(ptr)
+ Arc::from_inner_in(ptr, alloc)
}
}
}
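`downcast` and `downcast_unchecked` now clone the allocator out of `self` before `mem::forget`, so the concrete `Arc<T, A>` keeps the same allocator. Usage is unchanged; a short sketch with the default `Global` allocator:
```
use std::any::Any;
use std::sync::Arc;

fn print_if_string(value: Arc<dyn Any + Send + Sync>) {
    if let Ok(string) = value.downcast::<String>() {
        println!("String ({}): {}", string.len(), string);
    }
}

fn main() {
    print_if_string(Arc::new("Hello World".to_string()));
    print_if_string(Arc::new(0i8)); // not a String, prints nothing
}
```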
@@ -1989,11 +2499,43 @@ impl<T> Weak<T> {
/// let empty: Weak<i64> = Weak::new();
/// assert!(empty.upgrade().is_none());
/// ```
+ #[inline]
#[stable(feature = "downgraded_weak", since = "1.10.0")]
- #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")]
+ #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
#[must_use]
pub const fn new() -> Weak<T> {
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) } }
+ Weak {
+ ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) },
+ alloc: Global,
+ }
+ }
+}
+
+impl<T, A: Allocator> Weak<T, A> {
+ /// Constructs a new `Weak<T, A>` associated with the provided allocator, without allocating
+ /// any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: Weak::upgrade
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::sync::Weak;
+ /// use std::alloc::System;
+ ///
+ /// let empty: Weak<i64, _> = Weak::new_in(System);
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn new_in(alloc: A) -> Weak<T, A> {
+ Weak {
+ ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) },
+ alloc,
+ }
}
}
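Both `Weak::new` and `Weak::new_in` avoid allocating by storing the sentinel address `usize::MAX`; later operations such as `from_raw_in` treat that address as dangling. A minimal sketch of that check (the real helper is private to the crate, so this is illustrative only):
```
// Illustrative sketch, not the library's actual private helper.
fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
    // A dangling `Weak` stores `usize::MAX` as its (metadata-free) address.
    ptr.cast::<()>() as usize == usize::MAX
}
```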
@@ -2005,6 +2547,55 @@ struct WeakInner<'a> {
}
impl<T: ?Sized> Weak<T> {
+ /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+ ///
+ /// This can be used to safely get a strong reference (by calling [`upgrade`]
+ /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+ ///
+ /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
+ /// as these don't own anything; the method still works on them).
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have originated from [`into_raw`] and must still own its potential
+ /// weak reference.

+ ///
+ /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
+ /// takes ownership of one weak reference currently represented as a raw pointer (the weak
+ /// count is not modified by this operation) and therefore it must be paired with a previous
+ /// call to [`into_raw`].
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::{Arc, Weak};
+ ///
+ /// let strong = Arc::new("hello".to_owned());
+ ///
+ /// let raw_1 = Arc::downgrade(&strong).into_raw();
+ /// let raw_2 = Arc::downgrade(&strong).into_raw();
+ ///
+ /// assert_eq!(2, Arc::weak_count(&strong));
+ ///
+ /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+ /// assert_eq!(1, Arc::weak_count(&strong));
+ ///
+ /// drop(strong);
+ ///
+ /// // Decrement the last weak count.
+ /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+ /// ```
+ ///
+ /// [`new`]: Weak::new
+ /// [`into_raw`]: Weak::into_raw
+ /// [`upgrade`]: Weak::upgrade
+ #[inline]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe { Weak::from_raw_in(ptr, Global) }
+ }
+}
+
+impl<T: ?Sized, A: Allocator> Weak<T, A> {
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
///
/// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -2082,7 +2673,8 @@ impl<T: ?Sized> Weak<T> {
result
}
- /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+ /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>` in the provided
+ /// allocator.
///
/// This can be used to safely get a strong reference (by calling [`upgrade`]
/// later) or to deallocate the weak count by dropping the `Weak<T>`.
@@ -2093,7 +2685,7 @@ impl<T: ?Sized> Weak<T> {
/// # Safety
///
/// The pointer must have originated from the [`into_raw`] and must still own its potential
- /// weak reference.
+ /// weak reference, and must point to a block of memory allocated by `alloc`.
///
/// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
/// takes ownership of one weak reference currently represented as a raw pointer (the weak
@@ -2123,8 +2715,9 @@ impl<T: ?Sized> Weak<T> {
/// [`new`]: Weak::new
/// [`into_raw`]: Weak::into_raw
/// [`upgrade`]: Weak::upgrade
- #[stable(feature = "weak_into_raw", since = "1.45.0")]
- pub unsafe fn from_raw(ptr: *const T) -> Self {
+ #[inline]
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
// See Weak::as_ptr for context on how the input pointer is derived.
let ptr = if is_dangling(ptr as *mut T) {
@@ -2140,11 +2733,11 @@ impl<T: ?Sized> Weak<T> {
};
// SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
- Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
+ Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
}
}
-impl<T: ?Sized> Weak<T> {
+impl<T: ?Sized, A: Allocator> Weak<T, A> {
/// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
/// dropping of the inner value if successful.
///
@@ -2171,28 +2764,35 @@ impl<T: ?Sized> Weak<T> {
#[must_use = "this returns a new `Arc`, \
without modifying the original weak pointer"]
#[stable(feature = "arc_weak", since = "1.4.0")]
- pub fn upgrade(&self) -> Option<Arc<T>> {
+ pub fn upgrade(&self) -> Option<Arc<T, A>>
+ where
+ A: Clone,
+ {
+ #[inline]
+ fn checked_increment(n: usize) -> Option<usize> {
+ // Any write of 0 we can observe leaves the field in a permanently zero state.
+ if n == 0 {
+ return None;
+ }
+ // See comments in `Arc::clone` for why we do this (for `mem::forget`).
+ assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR);
+ Some(n + 1)
+ }
+
// We use a CAS loop to increment the strong count instead of a
// fetch_add as this function should never take the reference count
// from zero to one.
- self.inner()?
- .strong
- // Relaxed is fine for the failure case because we don't have any expectations about the new state.
- // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner
- // value can be initialized after `Weak` references have already been created. In that case, we
- // expect to observe the fully initialized value.
- .fetch_update(Acquire, Relaxed, |n| {
- // Any write of 0 we can observe leaves the field in permanently zero state.
- if n == 0 {
- return None;
- }
- // See comments in `Arc::clone` for why we do this (for `mem::forget`).
- assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR);
- Some(n + 1)
- })
- .ok()
- // null checked above
- .map(|_| unsafe { Arc::from_inner(self.ptr) })
+ //
+ // Relaxed is fine for the failure case because we don't have any expectations about the new state.
+ // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner
+ // value can be initialized after `Weak` references have already been created. In that case, we
+ // expect to observe the fully initialized value.
+ if self.inner()?.strong.fetch_update(Acquire, Relaxed, checked_increment).is_ok() {
+ // SAFETY: pointer is not null, verified in checked_increment
+ unsafe { Some(Arc::from_inner_in(self.ptr, self.alloc.clone())) }
+ } else {
+ None
+ }
}
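The rewrite above only changes how the CAS loop is expressed: the closure is hoisted into a named `checked_increment` and the allocator is cloned on success. Observable behavior is unchanged, as in this standard usage sketch:
```
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let weak_five = Arc::downgrade(&five);

    // Succeeds while a strong reference is still alive...
    assert_eq!(weak_five.upgrade().map(|v| *v), Some(5));

    drop(five);

    // ...and reliably fails once the strong count has reached zero.
    assert!(weak_five.upgrade().is_none());
}
```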
/// Gets the number of strong (`Arc`) pointers pointing to this allocation.
@@ -2218,22 +2818,22 @@ impl<T: ?Sized> Weak<T> {
#[must_use]
#[stable(feature = "weak_counts", since = "1.41.0")]
pub fn weak_count(&self) -> usize {
- self.inner()
- .map(|inner| {
- let weak = inner.weak.load(Acquire);
- let strong = inner.strong.load(Acquire);
- if strong == 0 {
- 0
- } else {
- // Since we observed that there was at least one strong pointer
- // after reading the weak count, we know that the implicit weak
- // reference (present whenever any strong references are alive)
- // was still around when we observed the weak count, and can
- // therefore safely subtract it.
- weak - 1
- }
- })
- .unwrap_or(0)
+ if let Some(inner) = self.inner() {
+ let weak = inner.weak.load(Acquire);
+ let strong = inner.strong.load(Acquire);
+ if strong == 0 {
+ 0
+ } else {
+ // Since we observed that there was at least one strong pointer
+ // after reading the weak count, we know that the implicit weak
+ // reference (present whenever any strong references are alive)
+ // was still around when we observed the weak count, and can
+ // therefore safely subtract it.
+ weak - 1
+ }
+ } else {
+ 0
+ }
}
/// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
@@ -2303,7 +2903,7 @@ impl<T: ?Sized> Weak<T> {
}
#[stable(feature = "arc_weak", since = "1.4.0")]
-impl<T: ?Sized> Clone for Weak<T> {
+impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
/// Makes a clone of the `Weak` pointer that points to the same allocation.
///
/// # Examples
@@ -2316,11 +2916,11 @@ impl<T: ?Sized> Clone for Weak<T> {
/// let _ = Weak::clone(&weak_five);
/// ```
#[inline]
- fn clone(&self) -> Weak<T> {
+ fn clone(&self) -> Weak<T, A> {
let inner = if let Some(inner) = self.inner() {
inner
} else {
- return Weak { ptr: self.ptr };
+ return Weak { ptr: self.ptr, alloc: self.alloc.clone() };
};
// See comments in Arc::clone() for why this is relaxed. This can use a
// fetch_add (ignoring the lock) because the weak count is only locked
@@ -2333,7 +2933,7 @@ impl<T: ?Sized> Clone for Weak<T> {
abort();
}
- Weak { ptr: self.ptr }
+ Weak { ptr: self.ptr, alloc: self.alloc.clone() }
}
}
@@ -2359,7 +2959,7 @@ impl<T> Default for Weak<T> {
}
#[stable(feature = "arc_weak", since = "1.4.0")]
-unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
+unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
/// Drops the `Weak` pointer.
///
/// # Examples
@@ -2397,25 +2997,27 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
if inner.weak.fetch_sub(1, Release) == 1 {
acquire!(inner.weak);
- unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) }
+ unsafe {
+ self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()))
+ }
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-trait ArcEqIdent<T: ?Sized + PartialEq> {
- fn eq(&self, other: &Arc<T>) -> bool;
- fn ne(&self, other: &Arc<T>) -> bool;
+trait ArcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
+ fn eq(&self, other: &Arc<T, A>) -> bool;
+ fn ne(&self, other: &Arc<T, A>) -> bool;
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
+impl<T: ?Sized + PartialEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> {
#[inline]
- default fn eq(&self, other: &Arc<T>) -> bool {
+ default fn eq(&self, other: &Arc<T, A>) -> bool {
**self == **other
}
#[inline]
- default fn ne(&self, other: &Arc<T>) -> bool {
+ default fn ne(&self, other: &Arc<T, A>) -> bool {
**self != **other
}
}
@@ -2428,20 +3030,20 @@ impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> {
+impl<T: ?Sized + crate::rc::MarkerEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> {
#[inline]
- fn eq(&self, other: &Arc<T>) -> bool {
+ fn eq(&self, other: &Arc<T, A>) -> bool {
Arc::ptr_eq(self, other) || **self == **other
}
#[inline]
- fn ne(&self, other: &Arc<T>) -> bool {
+ fn ne(&self, other: &Arc<T, A>) -> bool {
!Arc::ptr_eq(self, other) && **self != **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
+impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Arc<T, A> {
/// Equality for two `Arc`s.
///
/// Two `Arc`s are equal if their inner values are equal, even if they are
@@ -2460,7 +3062,7 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
/// assert!(five == Arc::new(5));
/// ```
#[inline]
- fn eq(&self, other: &Arc<T>) -> bool {
+ fn eq(&self, other: &Arc<T, A>) -> bool {
ArcEqIdent::eq(self, other)
}
@@ -2481,13 +3083,13 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
/// assert!(five != Arc::new(6));
/// ```
#[inline]
- fn ne(&self, other: &Arc<T>) -> bool {
+ fn ne(&self, other: &Arc<T, A>) -> bool {
ArcEqIdent::ne(self, other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
+impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Arc<T, A> {
/// Partial comparison for two `Arc`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
@@ -2502,7 +3104,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
/// ```
- fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
+ fn partial_cmp(&self, other: &Arc<T, A>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
@@ -2519,7 +3121,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// assert!(five < Arc::new(6));
/// ```
- fn lt(&self, other: &Arc<T>) -> bool {
+ fn lt(&self, other: &Arc<T, A>) -> bool {
*(*self) < *(*other)
}
@@ -2536,7 +3138,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// assert!(five <= Arc::new(5));
/// ```
- fn le(&self, other: &Arc<T>) -> bool {
+ fn le(&self, other: &Arc<T, A>) -> bool {
*(*self) <= *(*other)
}
@@ -2553,7 +3155,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// assert!(five > Arc::new(4));
/// ```
- fn gt(&self, other: &Arc<T>) -> bool {
+ fn gt(&self, other: &Arc<T, A>) -> bool {
*(*self) > *(*other)
}
@@ -2570,12 +3172,12 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// assert!(five >= Arc::new(5));
/// ```
- fn ge(&self, other: &Arc<T>) -> bool {
+ fn ge(&self, other: &Arc<T, A>) -> bool {
*(*self) >= *(*other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Ord> Ord for Arc<T> {
+impl<T: ?Sized + Ord, A: Allocator> Ord for Arc<T, A> {
/// Comparison for two `Arc`s.
///
/// The two are compared by calling `cmp()` on their inner values.
@@ -2590,29 +3192,29 @@ impl<T: ?Sized + Ord> Ord for Arc<T> {
///
/// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
/// ```
- fn cmp(&self, other: &Arc<T>) -> Ordering {
+ fn cmp(&self, other: &Arc<T, A>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> Eq for Arc<T> {}
+impl<T: ?Sized + Eq, A: Allocator> Eq for Arc<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
+impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Arc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
+impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Arc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> fmt::Pointer for Arc<T> {
+impl<T: ?Sized, A: Allocator> fmt::Pointer for Arc<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&(&**self as *const T), f)
}
@@ -2637,7 +3239,7 @@ impl<T: Default> Default for Arc<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Hash> Hash for Arc<T> {
+impl<T: ?Sized + Hash, A: Allocator> Hash for Arc<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state)
}
@@ -2724,7 +3326,7 @@ impl From<String> for Arc<str> {
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T: ?Sized> From<Box<T>> for Arc<T> {
+impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Arc<T, A> {
/// Move a boxed object to a new, reference-counted allocation.
///
/// # Example
@@ -2736,14 +3338,14 @@ impl<T: ?Sized> From<Box<T>> for Arc<T> {
/// assert_eq!("eggplant", &shared[..]);
/// ```
#[inline]
- fn from(v: Box<T>) -> Arc<T> {
- Arc::from_box(v)
+ fn from(v: Box<T, A>) -> Arc<T, A> {
+ Arc::from_box_in(v)
}
}
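With the generic `From<Box<T, A>>` impl, the allocator travels along with the boxed value into the resulting `Arc<T, A>`. A small sketch, assuming a nightly toolchain with `allocator_api` enabled:
```
#![feature(allocator_api)]

use std::alloc::System;
use std::sync::Arc;

fn main() {
    // The allocator is carried over: `Box<[i32], System>` becomes `Arc<[i32], System>`.
    let boxed: Box<[i32], System> = Box::new_in([1, 2, 3], System);
    let shared: Arc<[i32], System> = Arc::from(boxed);
    assert_eq!(*shared, [1, 2, 3]);
}
```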
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shared_from_slice", since = "1.21.0")]
-impl<T> From<Vec<T>> for Arc<[T]> {
+impl<T, A: Allocator + Clone> From<Vec<T, A>> for Arc<[T], A> {
/// Allocate a reference-counted slice and move `v`'s items into it.
///
/// # Example
@@ -2755,12 +3357,18 @@ impl<T> From<Vec<T>> for Arc<[T]> {
/// assert_eq!(&[1, 2, 3], &shared[..]);
/// ```
#[inline]
- fn from(mut v: Vec<T>) -> Arc<[T]> {
+ fn from(v: Vec<T, A>) -> Arc<[T], A> {
unsafe {
- let rc = Arc::copy_from_slice(&v);
- // Allow the Vec to free its memory, but not destroy its contents
- v.set_len(0);
- rc
+ let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
+
+ let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
+ ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).data as *mut [T] as *mut T, len);
+
+ // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
+ // without dropping its contents or the allocator
+ let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
+
+ Self::from_ptr_in(rc_ptr, alloc)
}
}
}
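The new body copies the elements into an allocation made with the `Vec`'s own allocator and then frees the original buffer without dropping its contents. From the caller's side nothing changes:
```
use std::sync::Arc;

fn main() {
    let unique: Vec<i32> = vec![1, 2, 3];
    let shared: Arc<[i32]> = Arc::from(unique);
    assert_eq!(&[1, 2, 3], &shared[..]);
}
```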
@@ -2812,12 +3420,13 @@ impl From<Arc<str>> for Arc<[u8]> {
}
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> {
- type Error = Arc<[T]>;
+impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
+ type Error = Arc<[T], A>;
- fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
+ fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
if boxed_slice.len() == N {
- Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
+ let alloc = boxed_slice.alloc.clone();
+ Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
} else {
Err(boxed_slice)
}
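A short usage sketch of the fallible slice-to-array conversion, which is unchanged in behavior and now merely generic over the allocator:
```
use std::sync::Arc;

fn main() {
    let slice: Arc<[u8]> = Arc::from(vec![1u8, 2, 3]);
    let array = Arc::<[u8; 3]>::try_from(slice).expect("length matches");
    assert_eq!(*array, [1, 2, 3]);

    // A length mismatch hands the original `Arc<[T]>` back as the error.
    let slice: Arc<[u8]> = Arc::from(vec![1u8, 2, 3]);
    assert!(Arc::<[u8; 4]>::try_from(slice).is_err());
}
```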
@@ -2910,21 +3519,21 @@ impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
+impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Arc<T, A> {
fn borrow(&self) -> &T {
&**self
}
}
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsRef<T> for Arc<T> {
+impl<T: ?Sized, A: Allocator> AsRef<T> for Arc<T, A> {
fn as_ref(&self) -> &T {
&**self
}
}
#[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Arc<T> {}
+impl<T: ?Sized, A: Allocator> Unpin for Arc<T, A> {}
/// Get the offset within an `ArcInner` for the payload behind a pointer.
///
@@ -2964,7 +3573,7 @@ impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
core::error::Error::source(&**self)
}
- fn provide<'a>(&'a self, req: &mut core::any::Demand<'a>) {
+ fn provide<'a>(&'a self, req: &mut core::error::Request<'a>) {
core::error::Error::provide(&**self, req);
}
}
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 598ecf05e..e45ddc789 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -213,7 +213,7 @@ mod spec_extend;
///
/// # Indexing
///
-/// The `Vec` type allows to access values by index, because it implements the
+/// The `Vec` type allows access to values by index, because it implements the
/// [`Index`] trait. An example will be more explicit:
///
/// ```
@@ -2961,7 +2961,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// [`copy_from_slice`]: slice::copy_from_slice
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> {
+impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for Vec<T, A> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.spec_extend(iter.into_iter())
}
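Dropping the `A: 'a` bound only relaxes which allocators the impl accepts; typical usage of `Extend<&'a T>` is unaffected:
```
fn main() {
    let mut v = vec![1u32];
    // `Extend<&T>` copies the elements out of the borrowed slice.
    v.extend(&[2, 3, 4]);
    assert_eq!(v, [1, 2, 3, 4]);
}
```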
diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs
index 56065ce56..e2f865d0f 100644
--- a/library/alloc/src/vec/spec_extend.rs
+++ b/library/alloc/src/vec/spec_extend.rs
@@ -36,7 +36,7 @@ impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
}
}
-impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A>
+impl<'a, T: 'a, I, A: Allocator> SpecExtend<&'a T, I> for Vec<T, A>
where
I: Iterator<Item = &'a T>,
T: Clone,
@@ -46,7 +46,7 @@ where
}
}
-impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
+impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
where
T: Copy,
{
diff --git a/library/alloc/tests/str.rs b/library/alloc/tests/str.rs
index 0ba5d088f..cb59a9d4a 100644
--- a/library/alloc/tests/str.rs
+++ b/library/alloc/tests/str.rs
@@ -1,4 +1,4 @@
-#![cfg_attr(not(bootstrap), allow(invalid_from_utf8))]
+#![allow(invalid_from_utf8)]
use std::assert_matches::assert_matches;
use std::borrow::Cow;
@@ -1739,6 +1739,28 @@ fn test_utf16_code_units() {
}
#[test]
+fn test_utf16_size_hint() {
+ assert_eq!("".encode_utf16().size_hint(), (0, Some(0)));
+ assert_eq!("123".encode_utf16().size_hint(), (1, Some(3)));
+ assert_eq!("1234".encode_utf16().size_hint(), (2, Some(4)));
+ assert_eq!("12345678".encode_utf16().size_hint(), (3, Some(8)));
+
+ fn hint_vec(src: &str) -> Vec<(usize, Option<usize>)> {
+ let mut it = src.encode_utf16();
+ let mut result = Vec::new();
+ result.push(it.size_hint());
+ while it.next().is_some() {
+ result.push(it.size_hint())
+ }
+ result
+ }
+
+ assert_eq!(hint_vec("12"), [(1, Some(2)), (1, Some(1)), (0, Some(0))]);
+ assert_eq!(hint_vec("\u{101234}"), [(2, Some(4)), (1, Some(1)), (0, Some(0))]);
+ assert_eq!(hint_vec("\u{101234}a"), [(2, Some(5)), (2, Some(2)), (1, Some(1)), (0, Some(0))]);
+}
+
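For a freshly created iterator, the hints asserted above are consistent with a lower bound of ⌈len/3⌉ UTF-16 units and an upper bound of `len` units, where `len` is the number of remaining UTF-8 bytes. A quick check against the same strings (this formula is an observation about these cases, not a documented guarantee):
```
fn main() {
    // Observed pattern (assumption, not a documented contract): lower bound
    // ceil(len / 3), upper bound len, for a fresh `encode_utf16` iterator.
    fn expected(s: &str) -> (usize, Option<usize>) {
        let len = s.len();
        ((len + 2) / 3, Some(len))
    }

    for s in ["", "123", "1234", "12345678", "\u{101234}"] {
        assert_eq!(s.encode_utf16().size_hint(), expected(s));
    }
}
```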
+#[test]
fn starts_with_in_unicode() {
assert!(!"├── Cargo.toml".starts_with("# "));
}
@@ -2416,10 +2438,7 @@ fn ceil_char_boundary() {
check_many("🇯🇵", 0..=0, 0);
check_many("🇯🇵", 1..=4, 4);
check_many("🇯🇵", 5..=8, 8);
-}
-#[test]
-#[should_panic]
-fn ceil_char_boundary_above_len_panic() {
- let _ = "x".ceil_char_boundary(2);
+ // above len
+ check_many("hello", 5..=10, 5);
}
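The removed `#[should_panic]` test reflects a behavior change: indices past the end now clamp to the string length instead of panicking. A small sketch (the API is still unstable and assumed to sit behind the `round_char_boundary` feature gate):
```
#![feature(round_char_boundary)]

fn main() {
    // Indices past the end now clamp to `len()` instead of panicking.
    assert_eq!("hello".ceil_char_boundary(7), 5);
    // Indices inside a multi-byte character still round up to its end.
    assert_eq!("🇯🇵".ceil_char_boundary(6), 8);
}
```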
diff --git a/library/alloc/tests/string.rs b/library/alloc/tests/string.rs
index 17d56d491..711e4eef2 100644
--- a/library/alloc/tests/string.rs
+++ b/library/alloc/tests/string.rs
@@ -368,29 +368,73 @@ fn remove_bad() {
#[test]
fn test_remove_matches() {
+ // test_single_pattern_occurrence
let mut s = "abc".to_string();
-
s.remove_matches('b');
assert_eq!(s, "ac");
+ // repeat_test_single_pattern_occurrence
s.remove_matches('b');
assert_eq!(s, "ac");
+ // test_single_character_pattern
let mut s = "abcb".to_string();
-
s.remove_matches('b');
assert_eq!(s, "ac");
+ // test_pattern_with_special_characters
let mut s = "ศไทย中华Việt Nam; foobarศ".to_string();
s.remove_matches('ศ');
assert_eq!(s, "ไทย中华Việt Nam; foobar");
+ // test_pattern_empty_text_and_pattern
let mut s = "".to_string();
s.remove_matches("");
assert_eq!(s, "");
+ // test_pattern_empty_text
+ let mut s = "".to_string();
+ s.remove_matches("something");
+ assert_eq!(s, "");
+
+ // test_empty_pattern
+ let mut s = "Testing with empty pattern.".to_string();
+ s.remove_matches("");
+ assert_eq!(s, "Testing with empty pattern.");
+
+ // test_multiple_consecutive_patterns_1
let mut s = "aaaaa".to_string();
s.remove_matches('a');
assert_eq!(s, "");
+
+ // test_multiple_consecutive_patterns_2
+ let mut s = "Hello **world****today!**".to_string();
+ s.remove_matches("**");
+ assert_eq!(s, "Hello worldtoday!");
+
+ // test_case_insensitive_pattern
+ let mut s = "CASE ** SeNsItIvE ** PaTtErN.".to_string();
+ s.remove_matches("sEnSiTiVe");
+ assert_eq!(s, "CASE ** SeNsItIvE ** PaTtErN.");
+
+ // test_pattern_with_digits
+ let mut s = "123 ** 456 ** 789".to_string();
+ s.remove_matches("**");
+ assert_eq!(s, "123 456 789");
+
+ // test_pattern_occurs_after_empty_string
+ let mut s = "abc X defXghi".to_string();
+ s.remove_matches("X");
+ assert_eq!(s, "abc defghi");
+
+ // test_large_pattern
+ let mut s = "aaaXbbbXcccXdddXeee".to_string();
+ s.remove_matches("X");
+ assert_eq!(s, "aaabbbcccdddeee");
+
+ // test_pattern_at_multiple_positions
+ let mut s = "Pattern ** found ** multiple ** times ** in ** text.".to_string();
+ s.remove_matches("**");
+ assert_eq!(s, "Pattern found multiple times in text.");
}
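`remove_matches` itself is still unstable (assumed to be gated on `string_remove_matches`); outside the test suite the non-overlapping-pattern case exercised above looks like this:
```
#![feature(string_remove_matches)]

fn main() {
    let mut s = String::from("Hello **world****today!**");
    s.remove_matches("**");
    assert_eq!(s, "Hello worldtoday!");
}
```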
#[test]
diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs
index ddd93e9a4..183dd8e6e 100644
--- a/library/alloc/tests/vec.rs
+++ b/library/alloc/tests/vec.rs
@@ -2498,3 +2498,68 @@ fn test_into_flattened_size_overflow() {
let v = vec![[(); usize::MAX]; 2];
let _ = v.into_flattened();
}
+
+#[cfg(not(bootstrap))]
+#[test]
+fn test_box_zero_allocator() {
+ use core::{alloc::AllocError, cell::RefCell};
+ use std::collections::HashSet;
+
+ // Track ZST allocations and ensure that they all have a matching free.
+ struct ZstTracker {
+ state: RefCell<(HashSet<usize>, usize)>,
+ }
+ unsafe impl Allocator for ZstTracker {
+ fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+ let ptr = if layout.size() == 0 {
+ let mut state = self.state.borrow_mut();
+ let addr = state.1;
+ assert!(state.0.insert(addr));
+ state.1 += 1;
+ std::println!("allocating {addr}");
+ std::ptr::invalid_mut(addr)
+ } else {
+ unsafe { std::alloc::alloc(layout) }
+ };
+ Ok(NonNull::slice_from_raw_parts(NonNull::new(ptr).ok_or(AllocError)?, layout.size()))
+ }
+
+ unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+ if layout.size() == 0 {
+ let addr = ptr.as_ptr() as usize;
+ let mut state = self.state.borrow_mut();
+ std::println!("freeing {addr}");
+ assert!(state.0.remove(&addr), "ZST free that wasn't allocated");
+ } else {
+ unsafe { std::alloc::dealloc(ptr.as_ptr(), layout) }
+ }
+ }
+ }
+
+ // Start the state at 100 to avoid returning null pointers.
+ let alloc = ZstTracker { state: RefCell::new((HashSet::new(), 100)) };
+
+ // Ensure that unsizing retains the same behavior.
+ {
+ let b1: Box<[u8; 0], &ZstTracker> = Box::new_in([], &alloc);
+ let b2: Box<[u8], &ZstTracker> = b1.clone();
+ let _b3: Box<[u8], &ZstTracker> = b2.clone();
+ }
+
+ // Ensure that shrinking doesn't leak a ZST allocation.
+ {
+ let mut v1: Vec<u8, &ZstTracker> = Vec::with_capacity_in(100, &alloc);
+ v1.shrink_to_fit();
+ }
+
+ // Ensure that conversion to/from vec works.
+ {
+ let v1: Vec<(), &ZstTracker> = Vec::with_capacity_in(100, &alloc);
+ let _b1: Box<[()], &ZstTracker> = v1.into_boxed_slice();
+ let b2: Box<[()], &ZstTracker> = Box::new_in([(), (), ()], &alloc);
+ let _v2: Vec<(), &ZstTracker> = b2.into();
+ }
+
+ // Ensure all ZSTs have been freed.
+ assert!(alloc.state.borrow().0.is_empty());
+}
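A minimal fragment in the same spirit, assuming the `ZstTracker` definition above is in scope: allocating and dropping a single zero-sized `Box` must leave the tracking set empty.
```
// Fragment only: relies on `ZstTracker`, `RefCell`, and `HashSet` from the test above.
let alloc = ZstTracker { state: RefCell::new((HashSet::new(), 100)) };
{
    let _b: Box<(), &ZstTracker> = Box::new_in((), &alloc);
} // `_b` dropped here; its ZST "allocation" must be freed exactly once.
assert!(alloc.state.borrow().0.is_empty());
```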