Diffstat (limited to 'library/alloc/src')
-rw-r--r--  library/alloc/src/boxed.rs                            |   9
-rw-r--r--  library/alloc/src/boxed/thin.rs                       |   8
-rw-r--r--  library/alloc/src/collections/binary_heap/mod.rs      |  24
-rw-r--r--  library/alloc/src/collections/binary_heap/tests.rs    |  19
-rw-r--r--  library/alloc/src/collections/btree/borrow.rs         |  22
-rw-r--r--  library/alloc/src/collections/btree/map.rs            | 730
-rw-r--r--  library/alloc/src/collections/btree/map/entry.rs      |  40
-rw-r--r--  library/alloc/src/collections/btree/map/tests.rs      |  49
-rw-r--r--  library/alloc/src/collections/btree/navigate.rs       |  55
-rw-r--r--  library/alloc/src/collections/btree/node.rs           | 102
-rw-r--r--  library/alloc/src/collections/vec_deque/into_iter.rs  | 185
-rw-r--r--  library/alloc/src/ffi/c_str.rs                        |   8
-rw-r--r--  library/alloc/src/fmt.rs                              |   2
-rw-r--r--  library/alloc/src/lib.rs                              |   3
-rw-r--r--  library/alloc/src/macros.rs                           |   2
-rw-r--r--  library/alloc/src/raw_vec.rs                          |  17
-rw-r--r--  library/alloc/src/rc.rs                               |   4
-rw-r--r--  library/alloc/src/slice.rs                            |  43
-rw-r--r--  library/alloc/src/string.rs                           |  16
-rw-r--r--  library/alloc/src/sync.rs                             | 149
-rw-r--r--  library/alloc/src/sync/tests.rs                       |  32
-rw-r--r--  library/alloc/src/vec/mod.rs                          |  40
22 files changed, 1430 insertions(+), 129 deletions(-)
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index a563b2587..44a378990 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -283,9 +283,7 @@ impl<T> Box<T> {
#[must_use]
#[inline(always)]
pub fn pin(x: T) -> Pin<Box<T>> {
- (#[rustc_box]
- Box::new(x))
- .into()
+ Box::new(x).into()
}
/// Allocates memory on the heap then places `x` into it,
@@ -1242,8 +1240,8 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Box<T> {
/// Creates a `Box<T>`, with the `Default` value for T.
+ #[inline]
fn default() -> Self {
- #[rustc_box]
Box::new(T::default())
}
}
@@ -1252,6 +1250,7 @@ impl<T: Default> Default for Box<T> {
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
impl<T> const Default for Box<[T]> {
+ #[inline]
fn default() -> Self {
let ptr: Unique<[T]> = Unique::<[T; 0]>::dangling();
Box(ptr, Global)
@@ -1262,6 +1261,7 @@ impl<T> const Default for Box<[T]> {
#[stable(feature = "default_box_extra", since = "1.17.0")]
#[rustc_const_unstable(feature = "const_default_impls", issue = "87864")]
impl const Default for Box<str> {
+ #[inline]
fn default() -> Self {
// SAFETY: This is the same as `Unique::cast<U>` but with an unsized `U = str`.
let ptr: Unique<str> = unsafe {
@@ -1616,7 +1616,6 @@ impl<T, const N: usize> From<[T; N]> for Box<[T]> {
/// println!("{boxed:?}");
/// ```
fn from(array: [T; N]) -> Box<[T]> {
- #[rustc_box]
Box::new(array)
}
}
diff --git a/library/alloc/src/boxed/thin.rs b/library/alloc/src/boxed/thin.rs
index c1a82e452..ad48315fd 100644
--- a/library/alloc/src/boxed/thin.rs
+++ b/library/alloc/src/boxed/thin.rs
@@ -48,7 +48,7 @@ unsafe impl<T: ?Sized + Sync> Sync for ThinBox<T> {}
#[unstable(feature = "thin_box", issue = "92791")]
impl<T> ThinBox<T> {
- /// Moves a type to the heap with its `Metadata` stored in the heap allocation instead of on
+ /// Moves a type to the heap with its [`Metadata`] stored in the heap allocation instead of on
/// the stack.
///
/// # Examples
@@ -59,6 +59,8 @@ impl<T> ThinBox<T> {
///
/// let five = ThinBox::new(5);
/// ```
+ ///
+ /// [`Metadata`]: core::ptr::Pointee::Metadata
#[cfg(not(no_global_oom_handling))]
pub fn new(value: T) -> Self {
let meta = ptr::metadata(&value);
@@ -69,7 +71,7 @@ impl<T> ThinBox<T> {
#[unstable(feature = "thin_box", issue = "92791")]
impl<Dyn: ?Sized> ThinBox<Dyn> {
- /// Moves a type to the heap with its `Metadata` stored in the heap allocation instead of on
+ /// Moves a type to the heap with its [`Metadata`] stored in the heap allocation instead of on
/// the stack.
///
/// # Examples
@@ -80,6 +82,8 @@ impl<Dyn: ?Sized> ThinBox<Dyn> {
///
/// let thin_slice = ThinBox::<[i32]>::new_unsize([1, 2, 3, 4]);
/// ```
+ ///
+ /// [`Metadata`]: core::ptr::Pointee::Metadata
#[cfg(not(no_global_oom_handling))]
pub fn new_unsize<T>(value: T) -> Self
where
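
The `Metadata` doc links added above point at the defining property of `ThinBox`: the pointer metadata (a slice length, or a trait object's vtable pointer) lives inside the heap allocation rather than in the handle, so the handle itself stays one pointer wide even for unsized types. A minimal sketch of that property, assuming a nightly toolchain with the unstable `thin_box` feature:

    #![feature(thin_box)]
    use std::boxed::ThinBox;
    use std::mem::size_of;

    fn main() {
        let thin = ThinBox::<[i32]>::new_unsize([1, 2, 3, 4]);
        // A ThinBox to a DST is a single thin pointer...
        assert_eq!(size_of::<ThinBox<[i32]>>(), size_of::<usize>());
        // ...while a regular Box<[i32]> is a fat pointer: data plus length.
        assert_eq!(size_of::<Box<[i32]>>(), 2 * size_of::<usize>());
        assert_eq!(&*thin, &[1, 2, 3, 4]);
    }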
diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs
index 0b73b1af4..f1d0a305d 100644
--- a/library/alloc/src/collections/binary_heap/mod.rs
+++ b/library/alloc/src/collections/binary_heap/mod.rs
@@ -851,18 +851,30 @@ impl<T: Ord> BinaryHeap<T> {
where
F: FnMut(&T) -> bool,
{
- let mut first_removed = self.len();
+ struct RebuildOnDrop<'a, T: Ord> {
+ heap: &'a mut BinaryHeap<T>,
+ first_removed: usize,
+ }
+
+ let mut guard = RebuildOnDrop { first_removed: self.len(), heap: self };
+
let mut i = 0;
- self.data.retain(|e| {
+ guard.heap.data.retain(|e| {
let keep = f(e);
- if !keep && i < first_removed {
- first_removed = i;
+ if !keep && i < guard.first_removed {
+ guard.first_removed = i;
}
i += 1;
keep
});
- // data[0..first_removed] is untouched, so we only need to rebuild the tail:
- self.rebuild_tail(first_removed);
+
+ impl<'a, T: Ord> Drop for RebuildOnDrop<'a, T> {
+ fn drop(&mut self) {
+ // data[..first_removed] is untouched, so we only need to
+ // rebuild the tail:
+ self.heap.rebuild_tail(self.first_removed);
+ }
+ }
}
}
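
The `RebuildOnDrop` guard introduced above makes `retain` unwind-safe: the tail rebuild now happens in `Drop`, so the heap invariant is restored even if the predicate panics mid-iteration (the scenario `test_retain_catch_unwind` below exercises). A reduced, self-contained sketch of the same drop-guard pattern, using sortedness of a plain `Vec` as a stand-in invariant:

    fn map_in_place(data: &mut Vec<i32>, f: impl FnMut(&mut i32)) {
        struct SortOnDrop<'a>(&'a mut Vec<i32>);
        impl Drop for SortOnDrop<'_> {
            fn drop(&mut self) {
                // Runs on normal exit and during unwinding alike, so the
                // invariant holds before the caller can observe the Vec.
                self.0.sort_unstable();
            }
        }
        let guard = SortOnDrop(data);
        // `f` may reorder values arbitrarily and may panic; either way the
        // guard re-establishes sortedness when it drops.
        guard.0.iter_mut().for_each(f);
    }

    fn main() {
        let mut v = vec![1, 2, 3];
        map_in_place(&mut v, |x| *x = 10 - *x); // produces 9, 8, 7
        assert_eq!(v, [7, 8, 9]); // resorted by the guard
    }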
diff --git a/library/alloc/src/collections/binary_heap/tests.rs b/library/alloc/src/collections/binary_heap/tests.rs
index ffbb6c80a..500caa356 100644
--- a/library/alloc/src/collections/binary_heap/tests.rs
+++ b/library/alloc/src/collections/binary_heap/tests.rs
@@ -474,6 +474,25 @@ fn test_retain() {
assert!(a.is_empty());
}
+#[test]
+fn test_retain_catch_unwind() {
+ let mut heap = BinaryHeap::from(vec![3, 1, 2]);
+
+ // Removes the 3, then unwinds out of retain.
+ let _ = catch_unwind(AssertUnwindSafe(|| {
+ heap.retain(|e| {
+ if *e == 1 {
+ panic!();
+ }
+ false
+ });
+ }));
+
+ // Naively this would be [1, 2] (an invalid heap) if BinaryHeap delegates to
+ // Vec's retain impl and then does not rebuild the heap after that unwinds.
+ assert_eq!(heap.into_vec(), [2, 1]);
+}
+
// old binaryheap failed this test
//
// Integrity means that all elements are present after a comparison panics,
diff --git a/library/alloc/src/collections/btree/borrow.rs b/library/alloc/src/collections/btree/borrow.rs
index 016f139a5..000b9bd0f 100644
--- a/library/alloc/src/collections/btree/borrow.rs
+++ b/library/alloc/src/collections/btree/borrow.rs
@@ -41,6 +41,28 @@ impl<'a, T> DormantMutRef<'a, T> {
// SAFETY: our own safety conditions imply this reference is again unique.
unsafe { &mut *self.ptr.as_ptr() }
}
+
+ /// Borrows a new mutable reference from the unique borrow initially captured.
+ ///
+ /// # Safety
+ ///
+    /// The reborrow must have ended, i.e., the reference returned by `new`, and
+ /// all pointers and references derived from it, must not be used anymore.
+ pub unsafe fn reborrow(&mut self) -> &'a mut T {
+ // SAFETY: our own safety conditions imply this reference is again unique.
+ unsafe { &mut *self.ptr.as_ptr() }
+ }
+
+ /// Borrows a new shared reference from the unique borrow initially captured.
+ ///
+ /// # Safety
+ ///
+    /// The reborrow must have ended, i.e., the reference returned by `new`, and
+ /// all pointers and references derived from it, must not be used anymore.
+ pub unsafe fn reborrow_shared(&self) -> &'a T {
+ // SAFETY: our own safety conditions imply this reference is again unique.
+ unsafe { &*self.ptr.as_ptr() }
+ }
}
#[cfg(test)]
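
The new `reborrow`/`reborrow_shared` methods let a stored-away unique borrow be revived repeatedly, not just once as with `awaken`; `CursorMut` below relies on this when it alternates between holding node handles and touching the tree root. Since `DormantMutRef` is internal to `alloc`, here is a self-contained analogue of the pattern for illustration (all names in it are made up):

    use std::marker::PhantomData;
    use std::ptr::NonNull;

    struct Dormant<'a, T> {
        ptr: NonNull<T>,
        _marker: PhantomData<&'a mut T>,
    }

    impl<'a, T> Dormant<'a, T> {
        /// Captures a unique borrow, handing back a live reborrow plus a
        /// dormant copy that can be revived later.
        fn new(t: &'a mut T) -> (&'a mut T, Self) {
            let ptr = NonNull::from(t);
            // SAFETY: we just captured the only unique reference.
            let live = unsafe { &mut *ptr.as_ptr() };
            (live, Dormant { ptr, _marker: PhantomData })
        }

        /// # Safety
        /// Every reference previously handed out must no longer be in use.
        unsafe fn reborrow(&mut self) -> &'a mut T {
            unsafe { &mut *self.ptr.as_ptr() }
        }
    }

    fn main() {
        let mut v = vec![1, 2, 3];
        let (live, mut dormant) = Dormant::new(&mut v);
        live.push(4);
        // `live` is never used past this point, so reviving is sound.
        let revived = unsafe { dormant.reborrow() };
        revived.push(5);
        assert_eq!(*revived, [1, 2, 3, 4, 5]);
    }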
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 1d9c4460e..386cd1a16 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -6,7 +6,7 @@ use core::hash::{Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
-use core::ops::{Index, RangeBounds};
+use core::ops::{Bound, Index, RangeBounds};
use core::ptr;
use crate::alloc::{Allocator, Global};
@@ -15,7 +15,7 @@ use super::borrow::DormantMutRef;
use super::dedup_sorted_iter::DedupSortedIter;
use super::navigate::{LazyLeafRange, LeafRange};
use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
-use super::search::SearchResult::*;
+use super::search::{SearchBound, SearchResult::*};
use super::set_val::SetValZST;
mod entry;
@@ -2422,6 +2422,732 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
pub const fn is_empty(&self) -> bool {
self.len() == 0
}
+
+ /// Returns a [`Cursor`] pointing at the first element that is above the
+ /// given bound.
+ ///
+ /// If no such element exists then a cursor pointing at the "ghost"
+ /// non-element is returned.
+ ///
+ /// Passing [`Bound::Unbounded`] will return a cursor pointing at the first
+ /// element of the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(btree_cursors)]
+ ///
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(4, "c");
+ /// let cursor = a.lower_bound(Bound::Excluded(&2));
+ /// assert_eq!(cursor.key(), Some(&3));
+ /// ```
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn lower_bound<Q>(&self, bound: Bound<&Q>) -> Cursor<'_, K, V>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let root_node = match self.root.as_ref() {
+ None => return Cursor { current: None, root: None },
+ Some(root) => root.reborrow(),
+ };
+ let edge = root_node.lower_bound(SearchBound::from_range(bound));
+ Cursor { current: edge.next_kv().ok(), root: self.root.as_ref() }
+ }
+
+ /// Returns a [`CursorMut`] pointing at the first element that is above the
+ /// given bound.
+ ///
+ /// If no such element exists then a cursor pointing at the "ghost"
+ /// non-element is returned.
+ ///
+ /// Passing [`Bound::Unbounded`] will return a cursor pointing at the first
+ /// element of the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(btree_cursors)]
+ ///
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(4, "c");
+ /// let cursor = a.lower_bound_mut(Bound::Excluded(&2));
+ /// assert_eq!(cursor.key(), Some(&3));
+ /// ```
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn lower_bound_mut<Q>(&mut self, bound: Bound<&Q>) -> CursorMut<'_, K, V, A>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let (root, dormant_root) = DormantMutRef::new(&mut self.root);
+ let root_node = match root.as_mut() {
+ None => {
+ return CursorMut {
+ current: None,
+ root: dormant_root,
+ length: &mut self.length,
+ alloc: &mut *self.alloc,
+ };
+ }
+ Some(root) => root.borrow_mut(),
+ };
+ let edge = root_node.lower_bound(SearchBound::from_range(bound));
+ CursorMut {
+ current: edge.next_kv().ok(),
+ root: dormant_root,
+ length: &mut self.length,
+ alloc: &mut *self.alloc,
+ }
+ }
+
+ /// Returns a [`Cursor`] pointing at the last element that is below the
+ /// given bound.
+ ///
+ /// If no such element exists then a cursor pointing at the "ghost"
+ /// non-element is returned.
+ ///
+ /// Passing [`Bound::Unbounded`] will return a cursor pointing at the last
+ /// element of the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(btree_cursors)]
+ ///
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(4, "c");
+ /// let cursor = a.upper_bound(Bound::Excluded(&3));
+ /// assert_eq!(cursor.key(), Some(&2));
+ /// ```
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn upper_bound<Q>(&self, bound: Bound<&Q>) -> Cursor<'_, K, V>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let root_node = match self.root.as_ref() {
+ None => return Cursor { current: None, root: None },
+ Some(root) => root.reborrow(),
+ };
+ let edge = root_node.upper_bound(SearchBound::from_range(bound));
+ Cursor { current: edge.next_back_kv().ok(), root: self.root.as_ref() }
+ }
+
+ /// Returns a [`CursorMut`] pointing at the last element that is below the
+ /// given bound.
+ ///
+ /// If no such element exists then a cursor pointing at the "ghost"
+ /// non-element is returned.
+ ///
+ /// Passing [`Bound::Unbounded`] will return a cursor pointing at the last
+ /// element of the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(btree_cursors)]
+ ///
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(4, "c");
+ /// let cursor = a.upper_bound_mut(Bound::Excluded(&3));
+ /// assert_eq!(cursor.key(), Some(&2));
+ /// ```
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn upper_bound_mut<Q>(&mut self, bound: Bound<&Q>) -> CursorMut<'_, K, V, A>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let (root, dormant_root) = DormantMutRef::new(&mut self.root);
+ let root_node = match root.as_mut() {
+ None => {
+ return CursorMut {
+ current: None,
+ root: dormant_root,
+ length: &mut self.length,
+ alloc: &mut *self.alloc,
+ };
+ }
+ Some(root) => root.borrow_mut(),
+ };
+ let edge = root_node.upper_bound(SearchBound::from_range(bound));
+ CursorMut {
+ current: edge.next_back_kv().ok(),
+ root: dormant_root,
+ length: &mut self.length,
+ alloc: &mut *self.alloc,
+ }
+ }
+}
+
+/// A cursor over a `BTreeMap`.
+///
+/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth.
+///
+/// Cursors always point to an element in the tree, and index in a logically circular way.
+/// To accommodate this, there is a "ghost" non-element that yields `None` between the last and
+/// first elements of the tree.
+///
+/// A `Cursor` is created with the [`BTreeMap::lower_bound`] and [`BTreeMap::upper_bound`] methods.
+#[unstable(feature = "btree_cursors", issue = "107540")]
+pub struct Cursor<'a, K: 'a, V: 'a> {
+ current: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>, marker::KV>>,
+ root: Option<&'a node::Root<K, V>>,
+}
+
+#[unstable(feature = "btree_cursors", issue = "107540")]
+impl<K, V> Clone for Cursor<'_, K, V> {
+ fn clone(&self) -> Self {
+ let Cursor { current, root } = *self;
+ Cursor { current, root }
+ }
+}
+
+#[unstable(feature = "btree_cursors", issue = "107540")]
+impl<K: Debug, V: Debug> Debug for Cursor<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Cursor").field(&self.key_value()).finish()
+ }
+}
+
+/// A cursor over a `BTreeMap` with editing operations.
+///
+/// A `CursorMut` is like an iterator, except that it can freely seek back-and-forth, and can
+/// safely mutate the tree during iteration. This is because the lifetime of its yielded
+/// references is tied to its own lifetime, instead of just the underlying tree. This means
+/// cursors cannot yield multiple elements at once.
+///
+/// Cursors always point to an element in the tree, and index in a logically circular way.
+/// To accommodate this, there is a "ghost" non-element that yields `None` between the last and
+/// first elements of the tree.
+///
+/// A `CursorMut` is created with the [`BTreeMap::lower_bound_mut`] and [`BTreeMap::upper_bound_mut`]
+/// methods.
+#[unstable(feature = "btree_cursors", issue = "107540")]
+pub struct CursorMut<
+ 'a,
+ K: 'a,
+ V: 'a,
+ #[unstable(feature = "allocator_api", issue = "32838")] A = Global,
+> {
+ current: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>>,
+ root: DormantMutRef<'a, Option<node::Root<K, V>>>,
+ length: &'a mut usize,
+ alloc: &'a mut A,
+}
+
+#[unstable(feature = "btree_cursors", issue = "107540")]
+impl<K: Debug, V: Debug, A> Debug for CursorMut<'_, K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("CursorMut").field(&self.key_value()).finish()
+ }
+}
+
+impl<'a, K, V> Cursor<'a, K, V> {
+ /// Moves the cursor to the next element of the `BTreeMap`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the first element of the `BTreeMap`. If it is pointing to the last
+ /// element of the `BTreeMap` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn move_next(&mut self) {
+ match self.current.take() {
+ None => {
+ self.current = self.root.and_then(|root| {
+ root.reborrow().first_leaf_edge().forget_node_type().right_kv().ok()
+ });
+ }
+ Some(current) => {
+ self.current = current.next_leaf_edge().next_kv().ok();
+ }
+ }
+ }
+
+ /// Moves the cursor to the previous element of the `BTreeMap`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the last element of the `BTreeMap`. If it is pointing to the first
+ /// element of the `BTreeMap` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn move_prev(&mut self) {
+ match self.current.take() {
+ None => {
+ self.current = self.root.and_then(|root| {
+ root.reborrow().last_leaf_edge().forget_node_type().left_kv().ok()
+ });
+ }
+ Some(current) => {
+ self.current = current.next_back_leaf_edge().next_back_kv().ok();
+ }
+ }
+ }
+
+ /// Returns a reference to the key of the element that the cursor is
+ /// currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn key(&self) -> Option<&'a K> {
+ self.current.as_ref().map(|current| current.into_kv().0)
+ }
+
+ /// Returns a reference to the value of the element that the cursor is
+ /// currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn value(&self) -> Option<&'a V> {
+ self.current.as_ref().map(|current| current.into_kv().1)
+ }
+
+ /// Returns a reference to the key and value of the element that the cursor
+ /// is currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn key_value(&self) -> Option<(&'a K, &'a V)> {
+ self.current.as_ref().map(|current| current.into_kv())
+ }
+
+ /// Returns a reference to the next element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the first element of the `BTreeMap`. If it is pointing to the last
+ /// element of the `BTreeMap` then this returns `None`.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn peek_next(&self) -> Option<(&'a K, &'a V)> {
+ let mut next = self.clone();
+ next.move_next();
+ next.current.as_ref().map(|current| current.into_kv())
+ }
+
+ /// Returns a reference to the previous element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the last element of the `BTreeMap`. If it is pointing to the first
+ /// element of the `BTreeMap` then this returns `None`.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn peek_prev(&self) -> Option<(&'a K, &'a V)> {
+ let mut prev = self.clone();
+ prev.move_prev();
+ prev.current.as_ref().map(|current| current.into_kv())
+ }
+}
+
+impl<'a, K, V, A> CursorMut<'a, K, V, A> {
+ /// Moves the cursor to the next element of the `BTreeMap`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the first element of the `BTreeMap`. If it is pointing to the last
+ /// element of the `BTreeMap` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn move_next(&mut self) {
+ match self.current.take() {
+ None => {
+ // SAFETY: The previous borrow of root has ended.
+ self.current = unsafe { self.root.reborrow() }.as_mut().and_then(|root| {
+ root.borrow_mut().first_leaf_edge().forget_node_type().right_kv().ok()
+ });
+ }
+ Some(current) => {
+ self.current = current.next_leaf_edge().next_kv().ok();
+ }
+ }
+ }
+
+ /// Moves the cursor to the previous element of the `BTreeMap`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the last element of the `BTreeMap`. If it is pointing to the first
+ /// element of the `BTreeMap` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn move_prev(&mut self) {
+ match self.current.take() {
+ None => {
+ // SAFETY: The previous borrow of root has ended.
+ self.current = unsafe { self.root.reborrow() }.as_mut().and_then(|root| {
+ root.borrow_mut().last_leaf_edge().forget_node_type().left_kv().ok()
+ });
+ }
+ Some(current) => {
+ self.current = current.next_back_leaf_edge().next_back_kv().ok();
+ }
+ }
+ }
+
+ /// Returns a reference to the key of the element that the cursor is
+ /// currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn key(&self) -> Option<&K> {
+ self.current.as_ref().map(|current| current.reborrow().into_kv().0)
+ }
+
+ /// Returns a reference to the value of the element that the cursor is
+ /// currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn value(&self) -> Option<&V> {
+ self.current.as_ref().map(|current| current.reborrow().into_kv().1)
+ }
+
+ /// Returns a reference to the key and value of the element that the cursor
+ /// is currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn key_value(&self) -> Option<(&K, &V)> {
+ self.current.as_ref().map(|current| current.reborrow().into_kv())
+ }
+
+ /// Returns a mutable reference to the value of the element that the cursor
+ /// is currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn value_mut(&mut self) -> Option<&mut V> {
+ self.current.as_mut().map(|current| current.kv_mut().1)
+ }
+
+ /// Returns a reference to the key and mutable reference to the value of the
+ /// element that the cursor is currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn key_value_mut(&mut self) -> Option<(&K, &mut V)> {
+ self.current.as_mut().map(|current| {
+ let (k, v) = current.kv_mut();
+ (&*k, v)
+ })
+ }
+
+    /// Returns a mutable reference to the key of the element that the cursor is
+ /// currently pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ ///
+ /// # Safety
+ ///
+ /// This can be used to modify the key, but you must ensure that the
+ /// `BTreeMap` invariants are maintained. Specifically:
+ ///
+ /// * The key must remain unique within the tree.
+ /// * The key must remain in sorted order with regards to other elements in
+ /// the tree.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub unsafe fn key_mut_unchecked(&mut self) -> Option<&mut K> {
+ self.current.as_mut().map(|current| current.kv_mut().0)
+ }
+
+ /// Returns a reference to the key and value of the next element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the first element of the `BTreeMap`. If it is pointing to the last
+ /// element of the `BTreeMap` then this returns `None`.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn peek_next(&mut self) -> Option<(&K, &mut V)> {
+ let (k, v) = match self.current {
+ None => {
+ // SAFETY: The previous borrow of root has ended.
+ unsafe { self.root.reborrow() }
+ .as_mut()?
+ .borrow_mut()
+ .first_leaf_edge()
+ .next_kv()
+ .ok()?
+ .into_kv_valmut()
+ }
+ // SAFETY: We're not using this to mutate the tree.
+ Some(ref mut current) => {
+ unsafe { current.reborrow_mut() }.next_leaf_edge().next_kv().ok()?.into_kv_valmut()
+ }
+ };
+ Some((k, v))
+ }
+
+ /// Returns a reference to the key and value of the previous element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the last element of the `BTreeMap`. If it is pointing to the first
+ /// element of the `BTreeMap` then this returns `None`.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn peek_prev(&mut self) -> Option<(&K, &mut V)> {
+ let (k, v) = match self.current.as_mut() {
+ None => {
+ // SAFETY: The previous borrow of root has ended.
+ unsafe { self.root.reborrow() }
+ .as_mut()?
+ .borrow_mut()
+                    .last_leaf_edge()
+                    .next_back_kv()
+ .ok()?
+ .into_kv_valmut()
+ }
+ Some(current) => {
+ // SAFETY: We're not using this to mutate the tree.
+ unsafe { current.reborrow_mut() }
+ .next_back_leaf_edge()
+ .next_back_kv()
+ .ok()?
+ .into_kv_valmut()
+ }
+ };
+ Some((k, v))
+ }
+
+ /// Returns a read-only cursor pointing to the current element.
+ ///
+ /// The lifetime of the returned `Cursor` is bound to that of the
+ /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
+ /// `CursorMut` is frozen for the lifetime of the `Cursor`.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn as_cursor(&self) -> Cursor<'_, K, V> {
+ Cursor {
+ // SAFETY: The tree is immutable while the cursor exists.
+ root: unsafe { self.root.reborrow_shared().as_ref() },
+ current: self.current.as_ref().map(|current| current.reborrow()),
+ }
+ }
+}
+
+// Now the tree editing operations
+impl<'a, K: Ord, V, A: Allocator + Clone> CursorMut<'a, K, V, A> {
+ /// Inserts a new element into the `BTreeMap` after the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new element is
+ /// inserted at the front of the `BTreeMap`.
+ ///
+ /// # Safety
+ ///
+ /// You must ensure that the `BTreeMap` invariants are maintained.
+ /// Specifically:
+ ///
+ /// * The key of the newly inserted element must be unique in the tree.
+ /// * All keys in the tree must remain in sorted order.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub unsafe fn insert_after_unchecked(&mut self, key: K, value: V) {
+ let edge = match self.current.take() {
+ None => {
+ // SAFETY: We have no other reference to the tree.
+ match unsafe { self.root.reborrow() } {
+ root @ None => {
+ // Tree is empty, allocate a new root.
+ let mut node = NodeRef::new_leaf(self.alloc.clone());
+ node.borrow_mut().push(key, value);
+ *root = Some(node.forget_type());
+ *self.length += 1;
+ return;
+ }
+ Some(root) => root.borrow_mut().first_leaf_edge(),
+ }
+ }
+ Some(current) => current.next_leaf_edge(),
+ };
+
+ let handle = edge.insert_recursing(key, value, self.alloc.clone(), |ins| {
+ drop(ins.left);
+ // SAFETY: The handle to the newly inserted value is always on a
+ // leaf node, so adding a new root node doesn't invalidate it.
+ let root = unsafe { self.root.reborrow().as_mut().unwrap() };
+ root.push_internal_level(self.alloc.clone()).push(ins.kv.0, ins.kv.1, ins.right)
+ });
+ self.current = handle.left_edge().next_back_kv().ok();
+ *self.length += 1;
+ }
+
+ /// Inserts a new element into the `BTreeMap` before the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new element is
+ /// inserted at the end of the `BTreeMap`.
+ ///
+ /// # Safety
+ ///
+ /// You must ensure that the `BTreeMap` invariants are maintained.
+ /// Specifically:
+ ///
+ /// * The key of the newly inserted element must be unique in the tree.
+ /// * All keys in the tree must remain in sorted order.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub unsafe fn insert_before_unchecked(&mut self, key: K, value: V) {
+ let edge = match self.current.take() {
+ None => {
+ // SAFETY: We have no other reference to the tree.
+ match unsafe { self.root.reborrow() } {
+ root @ None => {
+ // Tree is empty, allocate a new root.
+ let mut node = NodeRef::new_leaf(self.alloc.clone());
+ node.borrow_mut().push(key, value);
+ *root = Some(node.forget_type());
+ *self.length += 1;
+ return;
+ }
+ Some(root) => root.borrow_mut().last_leaf_edge(),
+ }
+ }
+ Some(current) => current.next_back_leaf_edge(),
+ };
+
+ let handle = edge.insert_recursing(key, value, self.alloc.clone(), |ins| {
+ drop(ins.left);
+ // SAFETY: The handle to the newly inserted value is always on a
+ // leaf node, so adding a new root node doesn't invalidate it.
+ let root = unsafe { self.root.reborrow().as_mut().unwrap() };
+ root.push_internal_level(self.alloc.clone()).push(ins.kv.0, ins.kv.1, ins.right)
+ });
+ self.current = handle.right_edge().next_kv().ok();
+ *self.length += 1;
+ }
+
+ /// Inserts a new element into the `BTreeMap` after the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new element is
+ /// inserted at the front of the `BTreeMap`.
+ ///
+ /// # Panics
+ ///
+ /// This function panics if:
+ /// - the given key compares less than or equal to the current element (if
+ /// any).
+ /// - the given key compares greater than or equal to the next element (if
+ /// any).
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn insert_after(&mut self, key: K, value: V) {
+ if let Some(current) = self.key() {
+ if &key <= current {
+ panic!("key must be ordered above the current element");
+ }
+ }
+        if let Some((next, _)) = self.peek_next() {
+ if &key >= next {
+ panic!("key must be ordered below the next element");
+ }
+ }
+ unsafe {
+ self.insert_after_unchecked(key, value);
+ }
+ }
+
+ /// Inserts a new element into the `BTreeMap` before the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new element is
+ /// inserted at the end of the `BTreeMap`.
+ ///
+ /// # Panics
+ ///
+ /// This function panics if:
+ /// - the given key compares greater than or equal to the current element
+ /// (if any).
+ /// - the given key compares less than or equal to the previous element (if
+ /// any).
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn insert_before(&mut self, key: K, value: V) {
+ if let Some(current) = self.key() {
+ if &key >= current {
+ panic!("key must be ordered below the current element");
+ }
+ }
+ if let Some((prev, _)) = self.peek_prev() {
+ if &key <= prev {
+ panic!("key must be ordered above the previous element");
+ }
+ }
+ unsafe {
+ self.insert_before_unchecked(key, value);
+ }
+ }
+
+ /// Removes the current element from the `BTreeMap`.
+ ///
+ /// The element that was removed is returned, and the cursor is
+ /// moved to point to the next element in the `BTreeMap`.
+ ///
+ /// If the cursor is currently pointing to the "ghost" non-element then no element
+ /// is removed and `None` is returned. The cursor is not moved in this case.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn remove_current(&mut self) -> Option<(K, V)> {
+ let current = self.current.take()?;
+ let mut emptied_internal_root = false;
+ let (kv, pos) =
+ current.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone());
+ self.current = pos.next_kv().ok();
+ *self.length -= 1;
+ if emptied_internal_root {
+ // SAFETY: This is safe since current does not point within the now
+ // empty root node.
+ let root = unsafe { self.root.reborrow().as_mut().unwrap() };
+ root.pop_internal_level(self.alloc.clone());
+ }
+ Some(kv)
+ }
+
+ /// Removes the current element from the `BTreeMap`.
+ ///
+ /// The element that was removed is returned, and the cursor is
+ /// moved to point to the previous element in the `BTreeMap`.
+ ///
+ /// If the cursor is currently pointing to the "ghost" non-element then no element
+ /// is removed and `None` is returned. The cursor is not moved in this case.
+ #[unstable(feature = "btree_cursors", issue = "107540")]
+ pub fn remove_current_and_move_back(&mut self) -> Option<(K, V)> {
+ let current = self.current.take()?;
+ let mut emptied_internal_root = false;
+ let (kv, pos) =
+ current.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone());
+ self.current = pos.next_back_kv().ok();
+ *self.length -= 1;
+ if emptied_internal_root {
+ // SAFETY: This is safe since current does not point within the now
+ // empty root node.
+ let root = unsafe { self.root.reborrow().as_mut().unwrap() };
+ root.pop_internal_level(self.alloc.clone());
+ }
+ Some(kv)
+ }
}
#[cfg(test)]
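
One consequence of the circular, "ghost"-based indexing documented on `Cursor` and `CursorMut` above: stepping past the last element parks the cursor on the ghost non-element, and one more step wraps around to the front. A small sketch, assuming a nightly toolchain with the unstable `btree_cursors` feature:

    #![feature(btree_cursors)]
    use std::collections::BTreeMap;
    use std::ops::Bound;

    fn main() {
        let map = BTreeMap::from([(1, 'a'), (2, 'b')]);
        let mut cur = map.upper_bound(Bound::Unbounded); // last element
        assert_eq!(cur.key(), Some(&2));
        cur.move_next(); // past the end: the "ghost" non-element
        assert_eq!(cur.key(), None);
        cur.move_next(); // wraps around to the first element
        assert_eq!(cur.key(), Some(&1));
    }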
diff --git a/library/alloc/src/collections/btree/map/entry.rs b/library/alloc/src/collections/btree/map/entry.rs
index 370b58864..e9366eec9 100644
--- a/library/alloc/src/collections/btree/map/entry.rs
+++ b/library/alloc/src/collections/btree/map/entry.rs
@@ -347,7 +347,7 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
/// assert_eq!(map["poneyland"], 37);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(self, value: V) -> &'a mut V {
+ pub fn insert(mut self, value: V) -> &'a mut V {
let out_ptr = match self.handle {
None => {
// SAFETY: There is no tree yet so no reference to it exists.
@@ -358,25 +358,27 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
map.length = 1;
val_ptr
}
- Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) {
- (None, val_ptr) => {
- // SAFETY: We have consumed self.handle.
- let map = unsafe { self.dormant_map.awaken() };
- map.length += 1;
- val_ptr
- }
- (Some(ins), val_ptr) => {
- drop(ins.left);
- // SAFETY: We have consumed self.handle and dropped the
- // remaining reference to the tree, ins.left.
- let map = unsafe { self.dormant_map.awaken() };
- let root = map.root.as_mut().unwrap(); // same as ins.left
- root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right);
- map.length += 1;
- val_ptr
- }
- },
+ Some(handle) => {
+ let new_handle =
+ handle.insert_recursing(self.key, value, self.alloc.clone(), |ins| {
+ drop(ins.left);
+ // SAFETY: Pushing a new root node doesn't invalidate
+ // handles to existing nodes.
+ let map = unsafe { self.dormant_map.reborrow() };
+ let root = map.root.as_mut().unwrap(); // same as ins.left
+ root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right)
+ });
+
+ // Get the pointer to the value
+ let val_ptr = new_handle.into_val_mut();
+
+ // SAFETY: We have consumed self.handle.
+ let map = unsafe { self.dormant_map.awaken() };
+ map.length += 1;
+ val_ptr
+ }
};
+
// Now that we have finished growing the tree using borrowed references,
// dereference the pointer to a part of it, that we picked up along the way.
unsafe { &mut *out_ptr }
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index 700b1463b..76c2f27b4 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -2336,3 +2336,52 @@ fn from_array() {
let unordered_duplicates = BTreeMap::from([(3, 4), (1, 2), (1, 2)]);
assert_eq!(map, unordered_duplicates);
}
+
+#[test]
+fn test_cursor() {
+ let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+
+ let mut cur = map.lower_bound(Bound::Unbounded);
+ assert_eq!(cur.key(), Some(&1));
+ cur.move_next();
+ assert_eq!(cur.key(), Some(&2));
+ assert_eq!(cur.peek_next(), Some((&3, &'c')));
+ cur.move_prev();
+ assert_eq!(cur.key(), Some(&1));
+ assert_eq!(cur.peek_prev(), None);
+
+ let mut cur = map.upper_bound(Bound::Excluded(&1));
+ assert_eq!(cur.key(), None);
+ cur.move_next();
+ assert_eq!(cur.key(), Some(&1));
+ cur.move_prev();
+ assert_eq!(cur.key(), None);
+ assert_eq!(cur.peek_prev(), Some((&3, &'c')));
+}
+
+#[test]
+fn test_cursor_mut() {
+ let mut map = BTreeMap::from([(1, 'a'), (3, 'c'), (5, 'e')]);
+ let mut cur = map.lower_bound_mut(Bound::Excluded(&3));
+ assert_eq!(cur.key(), Some(&5));
+ cur.insert_before(4, 'd');
+ assert_eq!(cur.key(), Some(&5));
+ assert_eq!(cur.peek_prev(), Some((&4, &mut 'd')));
+ cur.move_next();
+ assert_eq!(cur.key(), None);
+ cur.insert_before(6, 'f');
+ assert_eq!(cur.key(), None);
+ assert_eq!(cur.remove_current(), None);
+ assert_eq!(cur.key(), None);
+ cur.insert_after(0, '?');
+ assert_eq!(cur.key(), None);
+ assert_eq!(map, BTreeMap::from([(0, '?'), (1, 'a'), (3, 'c'), (4, 'd'), (5, 'e'), (6, 'f')]));
+
+ let mut cur = map.upper_bound_mut(Bound::Included(&5));
+ assert_eq!(cur.key(), Some(&5));
+ assert_eq!(cur.remove_current(), Some((5, 'e')));
+ assert_eq!(cur.key(), Some(&6));
+ assert_eq!(cur.remove_current_and_move_back(), Some((6, 'f')));
+ assert_eq!(cur.key(), Some(&4));
+ assert_eq!(map, BTreeMap::from([(0, '?'), (1, 'a'), (3, 'c'), (4, 'd')]));
+}
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
index 1e33c1e64..b890717e5 100644
--- a/library/alloc/src/collections/btree/navigate.rs
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -4,6 +4,7 @@ use core::ops::RangeBounds;
use core::ptr;
use super::node::{marker, ForceResult::*, Handle, NodeRef};
+use super::search::SearchBound;
use crate::alloc::Allocator;
// `front` and `back` are always both `None` or both `Some`.
@@ -386,7 +387,7 @@ impl<BorrowType: marker::BorrowType, K, V>
/// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
/// on the left side, which is either in the same leaf node or in an ancestor node.
/// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
- fn next_back_kv(
+ pub fn next_back_kv(
self,
) -> Result<
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
@@ -707,7 +708,9 @@ impl<BorrowType: marker::BorrowType, K, V>
}
/// Returns the leaf edge closest to a KV for backward navigation.
- fn next_back_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ pub fn next_back_leaf_edge(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
match self.force() {
Leaf(leaf_kv) => leaf_kv.left_edge(),
Internal(internal_kv) => {
@@ -717,3 +720,51 @@ impl<BorrowType: marker::BorrowType, K, V>
}
}
}
+
+impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Returns the leaf edge corresponding to the first point at which the
+ /// given bound is true.
+ pub fn lower_bound<Q: ?Sized>(
+ self,
+ mut bound: SearchBound<&Q>,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ {
+ let mut node = self;
+ loop {
+ let (edge, new_bound) = node.find_lower_bound_edge(bound);
+ match edge.force() {
+ Leaf(edge) => return edge,
+ Internal(edge) => {
+ node = edge.descend();
+ bound = new_bound;
+ }
+ }
+ }
+ }
+
+ /// Returns the leaf edge corresponding to the last point at which the
+ /// given bound is true.
+ pub fn upper_bound<Q: ?Sized>(
+ self,
+ mut bound: SearchBound<&Q>,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ {
+ let mut node = self;
+ loop {
+ let (edge, new_bound) = node.find_upper_bound_edge(bound);
+ match edge.force() {
+ Leaf(edge) => return edge,
+ Internal(edge) => {
+ node = edge.descend();
+ bound = new_bound;
+ }
+ }
+ }
+ }
+}
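
Both searches descend one node per level, re-running the narrowed `SearchBound` in each child until they bottom out at a leaf edge. On a flat sorted sequence the same semantics are what `slice::partition_point` computes, which makes a convenient mental model for the tree version:

    fn main() {
        let keys = [1, 2, 3, 4];
        // Analogue of lower_bound(Bound::Excluded(&2)): the first position
        // whose key is strictly greater than 2.
        let idx = keys.partition_point(|&k| k <= 2);
        assert_eq!(keys[idx], 3); // matches the BTreeMap doc example above
    }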
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index 691246644..3233a575e 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -442,6 +442,24 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
// SAFETY: we have exclusive access to the entire node.
unsafe { &mut *ptr }
}
+
+ /// Returns a dormant copy of this node with its lifetime erased which can
+ /// be reawakened later.
+ pub fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+}
+
+impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
+ /// Revert to the unique borrow initially captured.
+ ///
+ /// # Safety
+ ///
+    /// The reborrow must have ended, i.e., the reference returned by `new`, and
+ /// all pointers and references derived from it, must not be used anymore.
+ pub unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
}
impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
@@ -798,6 +816,25 @@ impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeT
// We can't use Handle::new_kv or Handle::new_edge because we don't know our type
Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
}
+
+ /// Returns a dormant copy of this handle which can be reawakened later.
+ ///
+ /// See `DormantMutRef` for more details.
+ pub fn dormant(&self) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
+ Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData }
+ }
+}
+
+impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
+ /// Revert to the unique borrow initially captured.
+ ///
+ /// # Safety
+ ///
+    /// The reborrow must have ended, i.e., the reference returned by `new`, and
+ /// all pointers and references derived from it, must not be used anymore.
+ pub unsafe fn awaken<'a>(self) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+ Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData }
+ }
}
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
@@ -851,9 +888,11 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// Inserts a new key-value pair between the key-value pairs to the right and left of
/// this edge. This method assumes that there is enough space in the node for the new
/// pair to fit.
- ///
- /// The returned pointer points to the inserted value.
- fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+ unsafe fn insert_fit(
+ mut self,
+ key: K,
+ val: V,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
debug_assert!(self.node.len() < CAPACITY);
let new_len = self.node.len() + 1;
@@ -862,7 +901,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
*self.node.len_mut() = new_len as u16;
- self.node.val_area_mut(self.idx).assume_init_mut()
+ Handle::new_kv(self.node, self.idx)
}
}
}
@@ -871,21 +910,26 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// Inserts a new key-value pair between the key-value pairs to the right and left of
/// this edge. This method splits the node if there isn't enough room.
///
- /// The returned pointer points to the inserted value.
+ /// Returns a dormant handle to the inserted node which can be reawakened
+ /// once splitting is complete.
fn insert<A: Allocator + Clone>(
- mut self,
+ self,
key: K,
val: V,
alloc: A,
- ) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
+ ) -> (
+ Option<SplitResult<'a, K, V, marker::Leaf>>,
+ Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
+ ) {
if self.node.len() < CAPACITY {
- let val_ptr = self.insert_fit(key, val);
- (None, val_ptr)
+ // SAFETY: There is enough space in the node for insertion.
+ let handle = unsafe { self.insert_fit(key, val) };
+ (None, handle.dormant())
} else {
let (middle_kv_idx, insertion) = splitpoint(self.idx);
let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
let mut result = middle.split(alloc);
- let mut insertion_edge = match insertion {
+ let insertion_edge = match insertion {
LeftOrRight::Left(insert_idx) => unsafe {
Handle::new_edge(result.left.reborrow_mut(), insert_idx)
},
@@ -893,8 +937,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
Handle::new_edge(result.right.borrow_mut(), insert_idx)
},
};
- let val_ptr = insertion_edge.insert_fit(key, val);
- (Some(result), val_ptr)
+ // SAFETY: We just split the node, so there is enough space for
+ // insertion.
+ let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
+ (Some(result), handle)
}
}
}
@@ -976,21 +1022,31 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
key: K,
value: V,
alloc: A,
- ) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
- let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) {
- (None, val_ptr) => return (None, val_ptr),
- (Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
+ split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>),
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ let (mut split, handle) = match self.insert(key, value, alloc.clone()) {
+ // SAFETY: we have finished splitting and can now re-awaken the
+ // handle to the inserted element.
+ (None, handle) => return unsafe { handle.awaken() },
+ (Some(split), handle) => (split.forget_node_type(), handle),
};
loop {
split = match split.left.ascend() {
Ok(parent) => {
match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
- None => return (None, val_ptr),
+ // SAFETY: we have finished splitting and can now re-awaken the
+ // handle to the inserted element.
+ None => return unsafe { handle.awaken() },
Some(split) => split.forget_node_type(),
}
}
- Err(root) => return (Some(SplitResult { left: root, ..split }), val_ptr),
+ Err(root) => {
+ split_root(SplitResult { left: root, ..split });
+ // SAFETY: we have finished splitting and can now re-awaken the
+ // handle to the inserted element.
+ return unsafe { handle.awaken() };
+ }
};
}
}
@@ -1043,6 +1099,14 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
let leaf = self.node.into_leaf_mut();
unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
}
+
+ pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
+ debug_assert!(self.idx < self.node.len());
+ let leaf = self.node.into_leaf_mut();
+ let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
+ let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
+ (k, v)
+ }
}
impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
@@ -1667,6 +1731,7 @@ pub mod marker {
pub enum Owned {}
pub enum Dying {}
+ pub enum DormantMut {}
pub struct Immut<'a>(PhantomData<&'a ()>);
pub struct Mut<'a>(PhantomData<&'a mut ()>);
pub struct ValMut<'a>(PhantomData<&'a mut ()>);
@@ -1688,6 +1753,7 @@ pub mod marker {
impl<'a> BorrowType for Immut<'a> {}
impl<'a> BorrowType for Mut<'a> {}
impl<'a> BorrowType for ValMut<'a> {}
+ impl BorrowType for DormantMut {}
pub enum KV {}
pub enum Edge {}
diff --git a/library/alloc/src/collections/vec_deque/into_iter.rs b/library/alloc/src/collections/vec_deque/into_iter.rs
index e54880e86..34bc0ce91 100644
--- a/library/alloc/src/collections/vec_deque/into_iter.rs
+++ b/library/alloc/src/collections/vec_deque/into_iter.rs
@@ -1,5 +1,5 @@
-use core::fmt;
use core::iter::{FusedIterator, TrustedLen};
+use core::{array, fmt, mem::MaybeUninit, ops::Try, ptr};
use crate::alloc::{Allocator, Global};
@@ -52,6 +52,126 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
let len = self.inner.len();
(len, Some(len))
}
+
+ #[inline]
+ fn advance_by(&mut self, n: usize) -> Result<(), usize> {
+ if self.inner.len < n {
+ let len = self.inner.len;
+ self.inner.clear();
+ Err(len)
+ } else {
+ self.inner.drain(..n);
+ Ok(())
+ }
+ }
+
+ #[inline]
+ fn count(self) -> usize {
+ self.inner.len
+ }
+
+ fn try_fold<B, F, R>(&mut self, mut init: B, mut f: F) -> R
+ where
+ F: FnMut(B, Self::Item) -> R,
+ R: Try<Output = B>,
+ {
+ struct Guard<'a, T, A: Allocator> {
+ deque: &'a mut VecDeque<T, A>,
+ // `consumed <= deque.len` always holds.
+ consumed: usize,
+ }
+
+ impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
+ fn drop(&mut self) {
+ self.deque.len -= self.consumed;
+ self.deque.head = self.deque.to_physical_idx(self.consumed);
+ }
+ }
+
+ let mut guard = Guard { deque: &mut self.inner, consumed: 0 };
+
+ let (head, tail) = guard.deque.as_slices();
+
+ init = head
+ .iter()
+ .map(|elem| {
+ guard.consumed += 1;
+ // SAFETY: Because we incremented `guard.consumed`, the
+ // deque effectively forgot the element, so we can take
+ // ownership
+ unsafe { ptr::read(elem) }
+ })
+ .try_fold(init, &mut f)?;
+
+ tail.iter()
+ .map(|elem| {
+ guard.consumed += 1;
+ // SAFETY: Same as above.
+ unsafe { ptr::read(elem) }
+ })
+ .try_fold(init, &mut f)
+ }
+
+ #[inline]
+ fn fold<B, F>(mut self, init: B, mut f: F) -> B
+ where
+ F: FnMut(B, Self::Item) -> B,
+ {
+ match self.try_fold(init, |b, item| Ok::<B, !>(f(b, item))) {
+ Ok(b) => b,
+ Err(e) => match e {},
+ }
+ }
+
+ #[inline]
+ fn last(mut self) -> Option<Self::Item> {
+ self.inner.pop_back()
+ }
+
+ fn next_chunk<const N: usize>(
+ &mut self,
+ ) -> Result<[Self::Item; N], array::IntoIter<Self::Item, N>> {
+ let mut raw_arr = MaybeUninit::uninit_array();
+ let raw_arr_ptr = raw_arr.as_mut_ptr().cast();
+ let (head, tail) = self.inner.as_slices();
+
+ if head.len() >= N {
+ // SAFETY: By manually adjusting the head and length of the deque, we effectively
+ // make it forget the first `N` elements, so taking ownership of them is safe.
+ unsafe { ptr::copy_nonoverlapping(head.as_ptr(), raw_arr_ptr, N) };
+ self.inner.head = self.inner.to_physical_idx(N);
+ self.inner.len -= N;
+ // SAFETY: We initialized the entire array with items from `head`
+ return Ok(unsafe { raw_arr.transpose().assume_init() });
+ }
+
+ // SAFETY: Same argument as above.
+ unsafe { ptr::copy_nonoverlapping(head.as_ptr(), raw_arr_ptr, head.len()) };
+ let remaining = N - head.len();
+
+ if tail.len() >= remaining {
+ // SAFETY: Same argument as above.
+ unsafe {
+ ptr::copy_nonoverlapping(tail.as_ptr(), raw_arr_ptr.add(head.len()), remaining)
+ };
+ self.inner.head = self.inner.to_physical_idx(N);
+ self.inner.len -= N;
+ // SAFETY: We initialized the entire array with items from `head` and `tail`
+ Ok(unsafe { raw_arr.transpose().assume_init() })
+ } else {
+ // SAFETY: Same argument as above.
+ unsafe {
+ ptr::copy_nonoverlapping(tail.as_ptr(), raw_arr_ptr.add(head.len()), tail.len())
+ };
+ let init = head.len() + tail.len();
+            // We completely drained all the deque's elements.
+ self.inner.head = 0;
+ self.inner.len = 0;
+ // SAFETY: We copied all elements from both slices to the beginning of the array, so
+ // the given range is initialized.
+ Err(unsafe { array::IntoIter::new_unchecked(raw_arr, 0..init) })
+ }
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -60,10 +180,73 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
fn next_back(&mut self) -> Option<T> {
self.inner.pop_back()
}
+
+ #[inline]
+ fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
+ let len = self.inner.len;
+ if len >= n {
+ self.inner.truncate(len - n);
+ Ok(())
+ } else {
+ self.inner.clear();
+ Err(len)
+ }
+ }
+
+ fn try_rfold<B, F, R>(&mut self, mut init: B, mut f: F) -> R
+ where
+ F: FnMut(B, Self::Item) -> R,
+ R: Try<Output = B>,
+ {
+ struct Guard<'a, T, A: Allocator> {
+ deque: &'a mut VecDeque<T, A>,
+ // `consumed <= deque.len` always holds.
+ consumed: usize,
+ }
+
+ impl<'a, T, A: Allocator> Drop for Guard<'a, T, A> {
+ fn drop(&mut self) {
+ self.deque.len -= self.consumed;
+ }
+ }
+
+ let mut guard = Guard { deque: &mut self.inner, consumed: 0 };
+
+ let (head, tail) = guard.deque.as_slices();
+
+ init = tail
+ .iter()
+ .map(|elem| {
+ guard.consumed += 1;
+ // SAFETY: See `try_fold`'s safety comment.
+ unsafe { ptr::read(elem) }
+ })
+ .try_rfold(init, &mut f)?;
+
+ head.iter()
+ .map(|elem| {
+ guard.consumed += 1;
+ // SAFETY: Same as above.
+ unsafe { ptr::read(elem) }
+ })
+ .try_rfold(init, &mut f)
+ }
+
+ #[inline]
+ fn rfold<B, F>(mut self, init: B, mut f: F) -> B
+ where
+ F: FnMut(B, Self::Item) -> B,
+ {
+ match self.try_rfold(init, |b, item| Ok::<B, !>(f(b, item))) {
+ Ok(b) => b,
+ Err(e) => match e {},
+ }
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
+ #[inline]
fn is_empty(&self) -> bool {
self.inner.is_empty()
}
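
In `try_fold`/`try_rfold` above, elements are moved out with `ptr::read` while a `Guard` counts how many were consumed; the guard's `Drop` then shrinks the deque so the moved-out elements are never dropped a second time, even if the closure unwinds. A reduced sketch of the same idea over a plain `Vec` (the function and its names are illustrative, not any real API):

    use std::ptr;

    fn drain_fold<T, B>(vec: &mut Vec<T>, init: B, mut f: impl FnMut(B, T) -> B) -> B {
        struct Guard<'a, T> {
            vec: &'a mut Vec<T>,
            consumed: usize,
        }
        impl<T> Drop for Guard<'_, T> {
            fn drop(&mut self) {
                // Shift the unconsumed tail to the front and forget the
                // prefix: those values were already moved out by ptr::read.
                unsafe {
                    let len = self.vec.len();
                    ptr::copy(
                        self.vec.as_ptr().add(self.consumed),
                        self.vec.as_mut_ptr(),
                        len - self.consumed,
                    );
                    self.vec.set_len(len - self.consumed);
                }
            }
        }
        let mut guard = Guard { vec, consumed: 0 };
        let mut acc = init;
        for i in 0..guard.vec.len() {
            // SAFETY: bumping `consumed` right after the read ensures the
            // guard will not drop this element again if `f` unwinds.
            let elem = unsafe { ptr::read(guard.vec.as_ptr().add(i)) };
            guard.consumed += 1;
            acc = f(acc, elem);
        }
        acc
    }

    fn main() {
        let mut v = vec![1, 2, 3, 4];
        let sum = drain_fold(&mut v, 0, |acc, x| acc + x);
        assert_eq!((sum, v.len()), (10, 0));
    }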
diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs
index 11bd4c4dc..f99395c72 100644
--- a/library/alloc/src/ffi/c_str.rs
+++ b/library/alloc/src/ffi/c_str.rs
@@ -991,12 +991,6 @@ impl IntoStringError {
pub fn utf8_error(&self) -> Utf8Error {
self.error
}
-
- #[doc(hidden)]
- #[unstable(feature = "cstr_internals", issue = "none")]
- pub fn __source(&self) -> &Utf8Error {
- &self.error
- }
}
impl IntoStringError {
@@ -1141,6 +1135,6 @@ impl core::error::Error for IntoStringError {
}
fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
- Some(self.__source())
+ Some(&self.error)
}
}
diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs
index eadb35cb9..1da86e1a4 100644
--- a/library/alloc/src/fmt.rs
+++ b/library/alloc/src/fmt.rs
@@ -558,7 +558,7 @@ pub use core::fmt::Alignment;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::Error;
#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::fmt::{write, ArgumentV1, Arguments};
+pub use core::fmt::{write, Arguments};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{Binary, Octal};
#[stable(feature = "rust1", since = "1.0.0")]
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index ca75c3895..e9cc3875f 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -87,6 +87,7 @@
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![allow(explicit_outlives_requirements)]
+#![cfg_attr(not(bootstrap), warn(multiple_supertrait_upcastable))]
//
// Library features:
#![feature(alloc_layout_extra)]
@@ -115,7 +116,6 @@
#![feature(const_eval_select)]
#![feature(const_pin)]
#![feature(const_waker)]
-#![feature(cstr_from_bytes_until_nul)]
#![feature(dispatch_from_dyn)]
#![feature(error_generic_member_access)]
#![feature(error_in_core)]
@@ -195,6 +195,7 @@
#![feature(c_unwind)]
#![feature(with_negative_coherence)]
#![cfg_attr(test, feature(panic_update_hook))]
+#![cfg_attr(not(bootstrap), feature(multiple_supertrait_upcastable))]
//
// Rustdoc features:
#![feature(doc_cfg)]
diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs
index 5198bf297..4c6ae8f25 100644
--- a/library/alloc/src/macros.rs
+++ b/library/alloc/src/macros.rs
@@ -48,6 +48,8 @@ macro_rules! vec {
);
($($x:expr),+ $(,)?) => (
$crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
#[rustc_box]
$crate::boxed::Box::new([$($x),+])
))
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
index 5a10121bb..3751f2a24 100644
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@ -241,10 +241,15 @@ impl<T, A: Allocator> RawVec<T, A> {
if T::IS_ZST || self.cap == 0 {
None
} else {
- // We have an allocated chunk of memory, so we can bypass runtime
- // checks to get our current layout.
+ // We could use Layout::array here which ensures the absence of isize and usize overflows
+ // and could hypothetically handle differences between stride and size, but this memory
+ // has already been allocated so we know it can't overflow and currently rust does not
+            // support such types. So we can do better by skipping some checks and avoiding an unwrap.
+ let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
unsafe {
- let layout = Layout::array::<T>(self.cap).unwrap_unchecked();
+ let align = mem::align_of::<T>();
+ let size = mem::size_of::<T>().unchecked_mul(self.cap);
+ let layout = Layout::from_size_align_unchecked(size, align);
Some((self.ptr.cast().into(), layout))
}
}
@@ -426,11 +431,13 @@ impl<T, A: Allocator> RawVec<T, A> {
assert!(cap <= self.capacity(), "Tried to shrink to a larger capacity");
let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
-
+        // See current_memory() for why this assert is here.
+ let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
let ptr = unsafe {
// `Layout::array` cannot overflow here because it would have
// overflowed earlier when capacity was larger.
- let new_layout = Layout::array::<T>(cap).unwrap_unchecked();
+ let new_size = mem::size_of::<T>().unchecked_mul(cap);
+ let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
self.alloc
.shrink(ptr, layout, new_layout)
.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
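
Both hunks rest on the same argument: once an allocation of `cap` elements exists, `size_of::<T>() * cap` already fits in `isize`, and for every type Rust currently supports the stride equals the size, so the layout can be rebuilt without `Layout::array`'s checks. A standalone sketch of that reasoning — the helper name is illustrative; inside `RawVec` this logic is inlined:

    use std::alloc::Layout;
    use std::mem;

    // SAFETY contract (hypothetical helper): `cap` must come from an
    // allocation that already succeeded, so the multiplication below
    // cannot overflow isize.
    unsafe fn layout_of_existing<T>(cap: usize) -> Layout {
        // The patch asserts stride == size at compile time before
        // trusting the unchecked arithmetic.
        let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
        let size = mem::size_of::<T>() * cap; // cannot overflow per the contract
        Layout::from_size_align_unchecked(size, mem::align_of::<T>())
    }

    fn main() {
        // The rebuilt layout agrees with the checked Layout::array path.
        let layout = unsafe { layout_of_existing::<u32>(8) };
        assert_eq!(layout, Layout::array::<u32>(8).unwrap());
    }
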
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index c9aa23fc4..932a537c5 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -1092,7 +1092,7 @@ impl<T: ?Sized> Rc<T> {
/// # Safety
///
/// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
- /// they must be must not be dereferenced or have active borrows for the duration
+ /// they must not be dereferenced or have active borrows for the duration
/// of the returned borrow, and their inner type must be exactly the same as the
/// inner type of this Rc (including lifetimes). This is trivially the case if no
/// such pointers exist, for example immediately after `Rc::new`.
@@ -2145,7 +2145,7 @@ impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
Rc::from_iter_exact(self, low)
}
} else {
- // TrustedLen contract guarantees that `upper_bound == `None` implies an iterator
+ // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
// length exceeding `usize::MAX`.
// The default implementation would collect into a vec which would panic.
// Thus we panic here immediately without invoking `Vec` code.
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs
index fecacc2bb..093dcbbe8 100644
--- a/library/alloc/src/slice.rs
+++ b/library/alloc/src/slice.rs
@@ -782,6 +782,38 @@ impl<T, A: Allocator> BorrowMut<[T]> for Vec<T, A> {
}
}
+// Specializable trait for implementing ToOwned::clone_into. This is
+// public in the crate and has the Allocator parameter so that
+// vec::clone_from can use it too.
+#[cfg(not(no_global_oom_handling))]
+pub(crate) trait SpecCloneIntoVec<T, A: Allocator> {
+ fn clone_into(&self, target: &mut Vec<T, A>);
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: Clone, A: Allocator> SpecCloneIntoVec<T, A> for [T] {
+ default fn clone_into(&self, target: &mut Vec<T, A>) {
+ // drop anything in target that will not be overwritten
+ target.truncate(self.len());
+
+ // target.len <= self.len due to the truncate above, so the
+ // slices here are always in-bounds.
+ let (init, tail) = self.split_at(target.len());
+
+ // reuse the contained values' allocations/resources.
+ target.clone_from_slice(init);
+ target.extend_from_slice(tail);
+ }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: Copy, A: Allocator> SpecCloneIntoVec<T, A> for [T] {
+ fn clone_into(&self, target: &mut Vec<T, A>) {
+ target.clear();
+ target.extend_from_slice(self);
+ }
+}
+
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> ToOwned for [T] {
@@ -797,16 +829,7 @@ impl<T: Clone> ToOwned for [T] {
}
fn clone_into(&self, target: &mut Vec<T>) {
- // drop anything in target that will not be overwritten
- target.truncate(self.len());
-
- // target.len <= self.len due to the truncate above, so the
- // slices here are always in-bounds.
- let (init, tail) = self.split_at(target.len());
-
- // reuse the contained values' allocations/resources.
- target.clone_from_slice(init);
- target.extend_from_slice(tail);
+ SpecCloneIntoVec::clone_into(self, target);
}
}
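
The behavior itself is unchanged and observable through the stable `ToOwned::clone_into` entry point: the target vector's buffer, and the element allocations in the overlapping prefix, are reused rather than replaced. A small usage example:

    fn main() {
        let src = [String::from("a"), String::from("b")];
        let mut target: Vec<String> = Vec::with_capacity(16);
        target.push(String::from("old"));

        // Unlike `target = src.to_vec()`, clone_into overwrites in place:
        // the first element's String buffer and the Vec's own buffer survive.
        src[..].clone_into(&mut target);
        assert_eq!(target, ["a", "b"]);
        assert!(target.capacity() >= 16);
    }
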
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index ca182c810..c7e7ed3e9 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -42,8 +42,6 @@
#![stable(feature = "rust1", since = "1.0.0")]
-#[cfg(not(no_global_oom_handling))]
-use core::char::{decode_utf16, REPLACEMENT_CHARACTER};
use core::error::Error;
use core::fmt;
use core::hash;
@@ -683,7 +681,7 @@ impl String {
// This isn't done via collect::<Result<_, _>>() for performance reasons.
// FIXME: the function can be simplified again when #48994 is closed.
let mut ret = String::with_capacity(v.len());
- for c in decode_utf16(v.iter().cloned()) {
+ for c in char::decode_utf16(v.iter().cloned()) {
if let Ok(c) = c {
ret.push(c);
} else {
@@ -722,7 +720,9 @@ impl String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf16_lossy(v: &[u16]) -> String {
- decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect()
+ char::decode_utf16(v.iter().cloned())
+ .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER))
+ .collect()
}
/// Decomposes a `String` into its raw components.
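
The change is purely a path adjustment: the free function and constant from `core::char` are also reachable as associated items on the `char` primitive, so the imports can go. A usage sketch of the same lossy-decoding pipeline:

    fn main() {
        // 0xD834 0xDD1E encodes '𝄞'; the trailing 0xD800 is an unpaired
        // surrogate, which decodes to an error and is replaced by U+FFFD.
        let v = [0xD834u16, 0xDD1E, 0x006D, 0xD800];
        let s: String = char::decode_utf16(v.iter().cloned())
            .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER))
            .collect();
        assert_eq!(s, "𝄞m\u{FFFD}");
    }
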
@@ -928,12 +928,12 @@ impl String {
/// Copies elements from `src` range to the end of the string.
///
- /// ## Panics
+ /// # Panics
///
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
- /// ## Examples
+ /// # Examples
///
/// ```
/// #![feature(string_extend_from_within)]
@@ -2213,10 +2213,6 @@ impl PartialEq for String {
fn eq(&self, other: &String) -> bool {
PartialEq::eq(&self[..], &other[..])
}
- #[inline]
- fn ne(&self, other: &String) -> bool {
- PartialEq::ne(&self[..], &other[..])
- }
}
macro_rules! impl_eq {
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index bab7f5f53..fdd341a06 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -654,6 +654,20 @@ impl<T> Arc<T> {
///
/// This will succeed even if there are outstanding weak references.
///
+ // FIXME: when `Arc::into_inner` is stabilized, add this paragraph:
+ /*
+ /// It is strongly recommended to use [`Arc::into_inner`] instead if you don't
+ /// want to keep the `Arc` in the [`Err`] case.
+ /// Immediately dropping the [`Err`] payload, like in the expression
+ /// `Arc::try_unwrap(this).ok()`, can still cause the strong count to
+ /// drop to zero and the inner value of the `Arc` to be dropped:
+ /// For instance if two threads execute this expression in parallel, then
+ /// there is a race condition. The threads could first both check whether they
+ /// have the last clone of their `Arc` via `Arc::try_unwrap`, and then
+ /// both drop their `Arc` in the call to [`ok`][`Result::ok`],
+ /// taking the strong count from two down to zero.
+ ///
+ */
/// # Examples
///
/// ```
@@ -685,6 +699,137 @@ impl<T> Arc<T> {
Ok(elem)
}
}
+
+ /// Returns the inner value, if the `Arc` has exactly one strong reference.
+ ///
+ /// Otherwise, [`None`] is returned and the `Arc` is dropped.
+ ///
+ /// This will succeed even if there are outstanding weak references.
+ ///
+ /// If `Arc::into_inner` is called on every clone of this `Arc`,
+ /// it is guaranteed that exactly one of the calls returns the inner value.
+ /// This means in particular that the inner value is not dropped.
+ ///
+ /// The similar expression `Arc::try_unwrap(this).ok()` does not
+ /// offer such a guarantee. See the last example below.
+ //
+ // FIXME: when `Arc::into_inner` is stabilized, add this to end
+ // of the previous sentence:
+ /*
+ /// and the documentation of [`Arc::try_unwrap`].
+ */
+ ///
+ /// # Examples
+ ///
+ /// Minimal example demonstrating the guarantee that `Arc::into_inner` gives.
+ /// ```
+ /// #![feature(arc_into_inner)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new(3);
+ /// let y = Arc::clone(&x);
+ ///
+ /// // Two threads calling `Arc::into_inner` on both clones of an `Arc`:
+ /// let x_thread = std::thread::spawn(|| Arc::into_inner(x));
+ /// let y_thread = std::thread::spawn(|| Arc::into_inner(y));
+ ///
+ /// let x_inner_value = x_thread.join().unwrap();
+ /// let y_inner_value = y_thread.join().unwrap();
+ ///
+ /// // One of the threads is guaranteed to receive the inner value:
+ /// assert!(matches!(
+ /// (x_inner_value, y_inner_value),
+ /// (None, Some(3)) | (Some(3), None)
+ /// ));
+ /// // The result could also be `(None, None)` if the threads called
+ /// // `Arc::try_unwrap(x).ok()` and `Arc::try_unwrap(y).ok()` instead.
+ /// ```
+ ///
+ /// A more practical example demonstrating the need for `Arc::into_inner`:
+ /// ```
+ /// #![feature(arc_into_inner)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// // Definition of a simple singly linked list using `Arc`:
+ /// #[derive(Clone)]
+ /// struct LinkedList<T>(Option<Arc<Node<T>>>);
+ /// struct Node<T>(T, Option<Arc<Node<T>>>);
+ ///
+ /// // Dropping a long `LinkedList<T>` relying on the destructor of `Arc`
+ /// // can cause a stack overflow. To prevent this, we can provide a
+ /// // manual `Drop` implementation that does the destruction in a loop:
+ /// impl<T> Drop for LinkedList<T> {
+ /// fn drop(&mut self) {
+ /// let mut link = self.0.take();
+ /// while let Some(arc_node) = link.take() {
+ /// if let Some(Node(_value, next)) = Arc::into_inner(arc_node) {
+ /// link = next;
+ /// }
+ /// }
+ /// }
+ /// }
+ ///
+ /// // Implementation of `new` and `push` omitted
+ /// impl<T> LinkedList<T> {
+ /// /* ... */
+ /// # fn new() -> Self {
+ /// # LinkedList(None)
+ /// # }
+ /// # fn push(&mut self, x: T) {
+ /// # self.0 = Some(Arc::new(Node(x, self.0.take())));
+ /// # }
+ /// }
+ ///
+ /// // The following code could have still caused a stack overflow
+ /// // despite the manual `Drop` impl if that `Drop` impl had used
+ /// // `Arc::try_unwrap(arc).ok()` instead of `Arc::into_inner(arc)`.
+ ///
+ /// // Create a long list and clone it
+ /// let mut x = LinkedList::new();
+ /// for i in 0..100000 {
+ /// x.push(i); // Adds i to the front of x
+ /// }
+ /// let y = x.clone();
+ ///
+ /// // Drop the clones in parallel
+ /// let x_thread = std::thread::spawn(|| drop(x));
+ /// let y_thread = std::thread::spawn(|| drop(y));
+ /// x_thread.join().unwrap();
+ /// y_thread.join().unwrap();
+ /// ```
+
+ // FIXME: when `Arc::into_inner` is stabilized, adjust above documentation
+ // and the documentation of `Arc::try_unwrap` according to the `FIXME`s. Also
+ // open an issue on rust-lang/rust-clippy, asking for a lint against
+ // `Arc::try_unwrap(...).ok()`.
+ #[inline]
+ #[unstable(feature = "arc_into_inner", issue = "106894")]
+ pub fn into_inner(this: Self) -> Option<T> {
+ // Make sure that the ordinary `Drop` implementation isn’t called as well
+ let mut this = mem::ManuallyDrop::new(this);
+
+ // Following the implementation of `drop` and `drop_slow`
+ if this.inner().strong.fetch_sub(1, Release) != 1 {
+ return None;
+ }
+
+ acquire!(this.inner().strong);
+
+ // SAFETY: This mirrors the line
+ //
+ // unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
+ //
+ // in `drop_slow`. Instead of dropping the value behind the pointer,
+ // it is read and eventually returned; `ptr::read` has the same
+ // safety conditions as `ptr::drop_in_place`.
+ let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) };
+
+ drop(Weak { ptr: this.ptr });
+
+ Some(inner)
+ }
}
impl<T> Arc<[T]> {
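
At the call site the new API behaves as the tests further below exercise. A compact single-threaded sketch, assuming a toolchain where `arc_into_inner` is enabled (the feature is still unstable at this point):

    #![feature(arc_into_inner)]

    use std::sync::Arc;

    fn main() {
        let x = Arc::new(7);
        let y = Arc::clone(&x);

        // into_inner consumes its Arc either way: with another strong
        // reference still alive it returns None; on the last reference
        // it returns the value instead of dropping it.
        assert_eq!(Arc::into_inner(x), None);
        assert_eq!(Arc::into_inner(y), Some(7));
    }
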
@@ -1588,7 +1733,7 @@ impl<T: ?Sized> Arc<T> {
/// # Safety
///
/// If any other `Arc` or [`Weak`] pointers to the same allocation exist, then
- /// they must be must not be dereferenced or have active borrows for the duration
+ /// they must not be dereferenced or have active borrows for the duration
/// of the returned borrow, and their inner type must be exactly the same as the
/// inner type of this Rc (including lifetimes). This is trivially the case if no
/// such pointers exist, for example immediately after `Arc::new`.
@@ -2750,7 +2895,7 @@ impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
Arc::from_iter_exact(self, low)
}
} else {
- // TrustedLen contract guarantees that `upper_bound == `None` implies an iterator
+ // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
// length exceeding `usize::MAX`.
// The default implementation would collect into a vec which would panic.
// Thus we panic here immediately without invoking `Vec` code.
diff --git a/library/alloc/src/sync/tests.rs b/library/alloc/src/sync/tests.rs
index 0fae8953a..863d58bdf 100644
--- a/library/alloc/src/sync/tests.rs
+++ b/library/alloc/src/sync/tests.rs
@@ -102,6 +102,38 @@ fn try_unwrap() {
}
#[test]
+fn into_inner() {
+ for _ in 0..100
+ // ^ Increase chances of hitting potential race conditions
+ {
+ let x = Arc::new(3);
+ let y = Arc::clone(&x);
+ let r_thread = std::thread::spawn(|| Arc::into_inner(x));
+ let s_thread = std::thread::spawn(|| Arc::into_inner(y));
+ let r = r_thread.join().expect("r_thread panicked");
+ let s = s_thread.join().expect("s_thread panicked");
+ assert!(
+ matches!((r, s), (None, Some(3)) | (Some(3), None)),
+ "assertion failed: unexpected result `{:?}`\
+ \n expected `(None, Some(3))` or `(Some(3), None)`",
+ (r, s),
+ );
+ }
+
+ let x = Arc::new(3);
+ assert_eq!(Arc::into_inner(x), Some(3));
+
+ let x = Arc::new(4);
+ let y = Arc::clone(&x);
+ assert_eq!(Arc::into_inner(x), None);
+ assert_eq!(Arc::into_inner(y), Some(4));
+
+ let x = Arc::new(5);
+ let _w = Arc::downgrade(&x);
+ assert_eq!(Arc::into_inner(x), Some(5));
+}
+
+#[test]
fn into_from_raw() {
let x = Arc::new(Box::new("hello"));
let y = x.clone();
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 36b0b3c9e..f2aa30f18 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -378,8 +378,8 @@ mod spec_extend;
/// Currently, `Vec` does not guarantee the order in which elements are dropped.
/// The order has changed in the past and may change again.
///
-/// [`get`]: ../../std/vec/struct.Vec.html#method.get
-/// [`get_mut`]: ../../std/vec/struct.Vec.html#method.get_mut
+/// [`get`]: slice::get
+/// [`get_mut`]: slice::get_mut
/// [`String`]: crate::string::String
/// [`&str`]: type@str
/// [`shrink_to_fit`]: Vec::shrink_to_fit
@@ -2647,35 +2647,6 @@ impl<T, A: Allocator> ops::DerefMut for Vec<T, A> {
}
#[cfg(not(no_global_oom_handling))]
-trait SpecCloneFrom {
- fn clone_from(this: &mut Self, other: &Self);
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone, A: Allocator> SpecCloneFrom for Vec<T, A> {
- default fn clone_from(this: &mut Self, other: &Self) {
- // drop anything that will not be overwritten
- this.truncate(other.len());
-
- // self.len <= other.len due to the truncate above, so the
- // slices here are always in-bounds.
- let (init, tail) = other.split_at(this.len());
-
- // reuse the contained values' allocations/resources.
- this.clone_from_slice(init);
- this.extend_from_slice(tail);
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy, A: Allocator> SpecCloneFrom for Vec<T, A> {
- fn clone_from(this: &mut Self, other: &Self) {
- this.clear();
- this.extend_from_slice(other);
- }
-}
-
-#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
#[cfg(not(test))]
@@ -2695,7 +2666,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
}
fn clone_from(&mut self, other: &Self) {
- SpecCloneFrom::clone_from(self, other)
+ crate::slice::SpecCloneIntoVec::clone_into(other.as_slice(), self);
}
}
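
With the duplicate trait gone, `Vec::clone_from` and `<[T]>::clone_into` share one specialized implementation; the observable allocation-reuse behavior is unchanged. A quick check of that behavior:

    fn main() {
        let src = vec![1, 2, 3];
        let mut dst = Vec::with_capacity(32);
        dst.extend([9, 9, 9, 9, 9]);

        // clone_from truncates and copies in place, so dst keeps the
        // buffer it already allocated instead of taking a fresh one.
        dst.clone_from(&src);
        assert_eq!(dst, [1, 2, 3]);
        assert!(dst.capacity() >= 32);
    }
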
@@ -3160,10 +3131,7 @@ impl<T, const N: usize> From<[T; N]> for Vec<T> {
/// ```
#[cfg(not(test))]
fn from(s: [T; N]) -> Vec<T> {
- <[T]>::into_vec(
- #[rustc_box]
- Box::new(s),
- )
+ <[T]>::into_vec(Box::new(s))
}
#[cfg(test)]