author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:57:31 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:57:31 +0000
commit    dc0db358abe19481e475e10c32149b53370f1a1c
tree      ab8ce99c4b255ce46f99ef402c27916055b899ee  /library/alloc/src
parent    Releasing progress-linux version 1.71.1+dfsg1-2~progress7.99u1.
Merging upstream version 1.72.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'library/alloc/src')
-rw-r--r--  library/alloc/src/alloc.rs                          |   9
-rw-r--r--  library/alloc/src/boxed.rs                          |  10
-rw-r--r--  library/alloc/src/collections/binary_heap/mod.rs    | 238
-rw-r--r--  library/alloc/src/collections/binary_heap/tests.rs  |   3
-rw-r--r--  library/alloc/src/collections/btree/map.rs          |  76
-rw-r--r--  library/alloc/src/collections/btree/map/tests.rs    |  70
-rw-r--r--  library/alloc/src/collections/btree/set.rs          |  61
-rw-r--r--  library/alloc/src/collections/btree/set/tests.rs    |  25
-rw-r--r--  library/alloc/src/collections/linked_list.rs        |  60
-rw-r--r--  library/alloc/src/collections/linked_list/tests.rs  |  53
-rw-r--r--  library/alloc/src/ffi/c_str.rs                      |   2
-rw-r--r--  library/alloc/src/lib.rs                            |  10
-rw-r--r--  library/alloc/src/rc.rs                             | 168
-rw-r--r--  library/alloc/src/rc/tests.rs                       |  45
-rw-r--r--  library/alloc/src/slice/tests.rs                    |   1
-rw-r--r--  library/alloc/src/string.rs                         |  15
-rw-r--r--  library/alloc/src/sync.rs                           |  26
-rw-r--r--  library/alloc/src/vec/drain_filter.rs               | 197
-rw-r--r--  library/alloc/src/vec/extract_if.rs                 | 113
-rw-r--r--  library/alloc/src/vec/mod.rs                        |  94
-rw-r--r--  library/alloc/src/vec/spec_from_elem.rs             |   6
21 files changed, 736 insertions(+), 546 deletions(-)
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 01d1fdc9b..e24a0fe51 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -4,8 +4,10 @@
#[cfg(not(test))]
use core::intrinsics;
+#[cfg(all(bootstrap, not(test)))]
use core::intrinsics::{min_align_of_val, size_of_val};
+#[cfg(all(bootstrap, not(test)))]
use core::ptr::Unique;
#[cfg(not(test))]
use core::ptr::{self, NonNull};
@@ -38,7 +40,6 @@ extern "Rust" {
#[rustc_nounwind]
fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
- #[cfg(not(bootstrap))]
static __rust_no_alloc_shim_is_unstable: u8;
}
@@ -96,7 +97,6 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
unsafe {
// Make sure we don't accidentally allow omitting the allocator shim in
// stable code until it is actually stabilized.
- #[cfg(not(bootstrap))]
core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);
__rust_alloc(layout.size(), layout.align())
@@ -337,14 +337,15 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
}
}
-#[cfg_attr(not(test), lang = "box_free")]
+#[cfg(all(bootstrap, not(test)))]
+#[lang = "box_free"]
#[inline]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
-pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
+unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
unsafe {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
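
For context, a minimal sketch (stable Rust, default global allocator) of the public entry points that sit in front of the `__rust_alloc` shims touched above; the volatile read of `__rust_no_alloc_shim_is_unstable` now guards this path unconditionally:

    use std::alloc::{alloc, dealloc, Layout};

    fn main() {
        // Round-trip one u64 through the global allocator, the same path
        // the __rust_alloc shim above ultimately serves.
        let layout = Layout::new::<u64>();
        unsafe {
            let p = alloc(layout);
            assert!(!p.is_null(), "allocation failed");
            p.cast::<u64>().write(42);
            assert_eq!(p.cast::<u64>().read(), 42);
            dealloc(p, layout);
        }
    }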
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 1768687e8..8ef2bac92 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -1211,8 +1211,16 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
+ #[inline]
fn drop(&mut self) {
- // FIXME: Do nothing, drop is currently performed by compiler.
+ // the T in the Box is dropped by the compiler before the destructor is run
+
+ let ptr = self.0;
+
+ unsafe {
+ let layout = Layout::for_value_raw(ptr.as_ptr());
+ self.1.deallocate(From::from(ptr.cast()), layout)
+ }
}
}
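
A rough stable-Rust illustration of what the new `Drop` impl does; `MyBox` here is a hypothetical stand-in, and the real `Box` additionally handles unsized values (via the unstable `Layout::for_value_raw`) and custom allocators:

    use std::alloc::{alloc, dealloc, Layout};
    use std::ptr::{self, NonNull};

    // Hypothetical simplified Box: owns one sized, non-zero-sized T.
    struct MyBox<T>(NonNull<T>);

    impl<T> MyBox<T> {
        fn new(value: T) -> Self {
            let layout = Layout::new::<T>();
            assert!(layout.size() > 0, "sketch does not handle ZSTs");
            unsafe {
                let p = alloc(layout).cast::<T>();
                assert!(!p.is_null(), "allocation failed");
                p.write(value);
                MyBox(NonNull::new_unchecked(p))
            }
        }
    }

    impl<T> Drop for MyBox<T> {
        fn drop(&mut self) {
            unsafe {
                // First drop the contents, then return the memory -- the two
                // steps that the compiler and the Drop impl above now split
                // between them for the real Box.
                ptr::drop_in_place(self.0.as_ptr());
                dealloc(self.0.as_ptr().cast(), Layout::new::<T>());
            }
        }
    }

    fn main() {
        let b = MyBox::new(String::from("hello"));
        drop(b); // the String is dropped, then the heap slot is freed
    }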
diff --git a/library/alloc/src/collections/binary_heap/mod.rs b/library/alloc/src/collections/binary_heap/mod.rs
index 2c089bb31..66573b90d 100644
--- a/library/alloc/src/collections/binary_heap/mod.rs
+++ b/library/alloc/src/collections/binary_heap/mod.rs
@@ -143,6 +143,7 @@
#![allow(missing_docs)]
#![stable(feature = "rust1", since = "1.0.0")]
+use core::alloc::Allocator;
use core::fmt;
use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen};
use core::mem::{self, swap, ManuallyDrop};
@@ -150,6 +151,7 @@ use core::num::NonZeroUsize;
use core::ops::{Deref, DerefMut};
use core::ptr;
+use crate::alloc::Global;
use crate::collections::TryReserveError;
use crate::slice;
use crate::vec::{self, AsVecIntoIter, Vec};
@@ -271,8 +273,11 @@ mod tests;
/// [peek\_mut]: BinaryHeap::peek_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")]
-pub struct BinaryHeap<T> {
- data: Vec<T>,
+pub struct BinaryHeap<
+ T,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ data: Vec<T, A>,
}
/// Structure wrapping a mutable reference to the greatest item on a
@@ -283,22 +288,26 @@ pub struct BinaryHeap<T> {
///
/// [`peek_mut`]: BinaryHeap::peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-pub struct PeekMut<'a, T: 'a + Ord> {
- heap: &'a mut BinaryHeap<T>,
+pub struct PeekMut<
+ 'a,
+ T: 'a + Ord,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ heap: &'a mut BinaryHeap<T, A>,
// If a set_len + sift_down are required, this is Some. If a &mut T has not
// yet been exposed to peek_mut()'s caller, it's None.
original_len: Option<NonZeroUsize>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: Ord + fmt::Debug> fmt::Debug for PeekMut<'_, T> {
+impl<T: Ord + fmt::Debug, A: Allocator> fmt::Debug for PeekMut<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
}
}
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-impl<T: Ord> Drop for PeekMut<'_, T> {
+impl<T: Ord, A: Allocator> Drop for PeekMut<'_, T, A> {
fn drop(&mut self) {
if let Some(original_len) = self.original_len {
// SAFETY: That's how many elements were in the Vec at the time of
@@ -315,7 +324,7 @@ impl<T: Ord> Drop for PeekMut<'_, T> {
}
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-impl<T: Ord> Deref for PeekMut<'_, T> {
+impl<T: Ord, A: Allocator> Deref for PeekMut<'_, T, A> {
type Target = T;
fn deref(&self) -> &T {
debug_assert!(!self.heap.is_empty());
@@ -325,7 +334,7 @@ impl<T: Ord> Deref for PeekMut<'_, T> {
}
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
-impl<T: Ord> DerefMut for PeekMut<'_, T> {
+impl<T: Ord, A: Allocator> DerefMut for PeekMut<'_, T, A> {
fn deref_mut(&mut self) -> &mut T {
debug_assert!(!self.heap.is_empty());
@@ -353,10 +362,10 @@ impl<T: Ord> DerefMut for PeekMut<'_, T> {
}
}
-impl<'a, T: Ord> PeekMut<'a, T> {
+impl<'a, T: Ord, A: Allocator> PeekMut<'a, T, A> {
/// Removes the peeked value from the heap and returns it.
#[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
- pub fn pop(mut this: PeekMut<'a, T>) -> T {
+ pub fn pop(mut this: PeekMut<'a, T, A>) -> T {
if let Some(original_len) = this.original_len.take() {
// SAFETY: This is how many elements were in the Vec at the time of
// the BinaryHeap::peek_mut call.
@@ -371,7 +380,7 @@ impl<'a, T: Ord> PeekMut<'a, T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Clone> Clone for BinaryHeap<T> {
+impl<T: Clone, A: Allocator + Clone> Clone for BinaryHeap<T, A> {
fn clone(&self) -> Self {
BinaryHeap { data: self.data.clone() }
}
@@ -391,18 +400,22 @@ impl<T: Ord> Default for BinaryHeap<T> {
}
#[stable(feature = "binaryheap_debug", since = "1.4.0")]
-impl<T: fmt::Debug> fmt::Debug for BinaryHeap<T> {
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for BinaryHeap<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.iter()).finish()
}
}
-struct RebuildOnDrop<'a, T: Ord> {
- heap: &'a mut BinaryHeap<T>,
+struct RebuildOnDrop<
+ 'a,
+ T: Ord,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ heap: &'a mut BinaryHeap<T, A>,
rebuild_from: usize,
}
-impl<'a, T: Ord> Drop for RebuildOnDrop<'a, T> {
+impl<T: Ord, A: Allocator> Drop for RebuildOnDrop<'_, T, A> {
fn drop(&mut self) {
self.heap.rebuild_tail(self.rebuild_from);
}
@@ -446,6 +459,52 @@ impl<T: Ord> BinaryHeap<T> {
pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
BinaryHeap { data: Vec::with_capacity(capacity) }
}
+}
+
+impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
+ /// Creates an empty `BinaryHeap` as a max-heap, using `A` as allocator.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::alloc::System;
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new_in(System);
+ /// heap.push(4);
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[must_use]
+ pub fn new_in(alloc: A) -> BinaryHeap<T, A> {
+ BinaryHeap { data: Vec::new_in(alloc) }
+ }
+
+ /// Creates an empty `BinaryHeap` with at least the specified capacity, using `A` as allocator.
+ ///
+ /// The binary heap will be able to hold at least `capacity` elements without
+ /// reallocating. This method is allowed to allocate for more elements than
+ /// `capacity`. If `capacity` is 0, the binary heap will not allocate.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::alloc::System;
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::with_capacity_in(10, System);
+ /// heap.push(4);
+ /// ```
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[must_use]
+ pub fn with_capacity_in(capacity: usize, alloc: A) -> BinaryHeap<T, A> {
+ BinaryHeap { data: Vec::with_capacity_in(capacity, alloc) }
+ }
/// Returns a mutable reference to the greatest item in the binary heap, or
/// `None` if it is empty.
@@ -478,7 +537,7 @@ impl<T: Ord> BinaryHeap<T> {
/// If the item is modified then the worst case time complexity is *O*(log(*n*)),
/// otherwise it's *O*(1).
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
- pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
+ pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, A>> {
if self.is_empty() { None } else { Some(PeekMut { heap: self, original_len: None }) }
}
@@ -573,7 +632,7 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
- pub fn into_sorted_vec(mut self) -> Vec<T> {
+ pub fn into_sorted_vec(mut self) -> Vec<T, A> {
let mut end = self.len();
while end > 1 {
end -= 1;
@@ -831,7 +890,7 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
#[inline]
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
- pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> {
+ pub fn drain_sorted(&mut self) -> DrainSorted<'_, T, A> {
DrainSorted { inner: self }
}
@@ -874,7 +933,7 @@ impl<T: Ord> BinaryHeap<T> {
}
}
-impl<T> BinaryHeap<T> {
+impl<T, A: Allocator> BinaryHeap<T, A> {
/// Returns an iterator visiting all values in the underlying vector, in
/// arbitrary order.
///
@@ -911,7 +970,7 @@ impl<T> BinaryHeap<T> {
/// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), [5, 4]);
/// ```
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
- pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
+ pub fn into_iter_sorted(self) -> IntoIterSorted<T, A> {
IntoIterSorted { inner: self }
}
@@ -1178,10 +1237,17 @@ impl<T> BinaryHeap<T> {
/// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
- pub fn into_vec(self) -> Vec<T> {
+ pub fn into_vec(self) -> Vec<T, A> {
self.into()
}
+ /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn allocator(&self) -> &A {
+ self.data.allocator()
+ }
+
/// Returns the length of the binary heap.
///
/// # Examples
@@ -1249,7 +1315,7 @@ impl<T> BinaryHeap<T> {
/// ```
#[inline]
#[stable(feature = "drain", since = "1.6.0")]
- pub fn drain(&mut self) -> Drain<'_, T> {
+ pub fn drain(&mut self) -> Drain<'_, T, A> {
Drain { iter: self.data.drain(..) }
}
@@ -1419,19 +1485,30 @@ impl<T> FusedIterator for Iter<'_, T> {}
/// [`into_iter`]: BinaryHeap::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
-pub struct IntoIter<T> {
- iter: vec::IntoIter<T>,
+pub struct IntoIter<
+ T,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ iter: vec::IntoIter<T, A>,
+}
+
+impl<T, A: Allocator> IntoIter<T, A> {
+ /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(&self) -> &A {
+ self.iter.allocator()
+ }
}
#[stable(feature = "collection_debug", since = "1.17.0")]
-impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Iterator for IntoIter<T> {
+impl<T, A: Allocator> Iterator for IntoIter<T, A> {
type Item = T;
#[inline]
@@ -1446,7 +1523,7 @@ impl<T> Iterator for IntoIter<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> DoubleEndedIterator for IntoIter<T> {
+impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back()
@@ -1454,14 +1531,14 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> ExactSizeIterator for IntoIter<T> {
+impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for IntoIter<T> {}
+impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
#[stable(feature = "default_iters", since = "1.70.0")]
impl<T> Default for IntoIter<T> {
@@ -1481,8 +1558,8 @@ impl<T> Default for IntoIter<T> {
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
-unsafe impl<T> SourceIter for IntoIter<T> {
- type Source = IntoIter<T>;
+unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
+ type Source = IntoIter<T, A>;
#[inline]
unsafe fn as_inner(&mut self) -> &mut Self::Source {
@@ -1492,7 +1569,7 @@ unsafe impl<T> SourceIter for IntoIter<T> {
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
-unsafe impl<I> InPlaceIterable for IntoIter<I> {}
+unsafe impl<I, A: Allocator> InPlaceIterable for IntoIter<I, A> {}
unsafe impl<I> AsVecIntoIter for IntoIter<I> {
type Item = I;
@@ -1505,12 +1582,23 @@ unsafe impl<I> AsVecIntoIter for IntoIter<I> {
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
#[derive(Clone, Debug)]
-pub struct IntoIterSorted<T> {
- inner: BinaryHeap<T>,
+pub struct IntoIterSorted<
+ T,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ inner: BinaryHeap<T, A>,
+}
+
+impl<T, A: Allocator> IntoIterSorted<T, A> {
+ /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(&self) -> &A {
+ self.inner.allocator()
+ }
}
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
-impl<T: Ord> Iterator for IntoIterSorted<T> {
+impl<T: Ord, A: Allocator> Iterator for IntoIterSorted<T, A> {
type Item = T;
#[inline]
@@ -1526,13 +1614,13 @@ impl<T: Ord> Iterator for IntoIterSorted<T> {
}
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
-impl<T: Ord> ExactSizeIterator for IntoIterSorted<T> {}
+impl<T: Ord, A: Allocator> ExactSizeIterator for IntoIterSorted<T, A> {}
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
-impl<T: Ord> FusedIterator for IntoIterSorted<T> {}
+impl<T: Ord, A: Allocator> FusedIterator for IntoIterSorted<T, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<T: Ord> TrustedLen for IntoIterSorted<T> {}
+unsafe impl<T: Ord, A: Allocator> TrustedLen for IntoIterSorted<T, A> {}
/// A draining iterator over the elements of a `BinaryHeap`.
///
@@ -1542,12 +1630,24 @@ unsafe impl<T: Ord> TrustedLen for IntoIterSorted<T> {}
/// [`drain`]: BinaryHeap::drain
#[stable(feature = "drain", since = "1.6.0")]
#[derive(Debug)]
-pub struct Drain<'a, T: 'a> {
- iter: vec::Drain<'a, T>,
+pub struct Drain<
+ 'a,
+ T: 'a,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ iter: vec::Drain<'a, T, A>,
+}
+
+impl<T, A: Allocator> Drain<'_, T, A> {
+ /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(&self) -> &A {
+ self.iter.allocator()
+ }
}
#[stable(feature = "drain", since = "1.6.0")]
-impl<T> Iterator for Drain<'_, T> {
+impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
type Item = T;
#[inline]
@@ -1562,7 +1662,7 @@ impl<T> Iterator for Drain<'_, T> {
}
#[stable(feature = "drain", since = "1.6.0")]
-impl<T> DoubleEndedIterator for Drain<'_, T> {
+impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back()
@@ -1570,14 +1670,14 @@ impl<T> DoubleEndedIterator for Drain<'_, T> {
}
#[stable(feature = "drain", since = "1.6.0")]
-impl<T> ExactSizeIterator for Drain<'_, T> {
+impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[stable(feature = "fused", since = "1.26.0")]
-impl<T> FusedIterator for Drain<'_, T> {}
+impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
/// A draining iterator over the elements of a `BinaryHeap`.
///
@@ -1587,17 +1687,29 @@ impl<T> FusedIterator for Drain<'_, T> {}
/// [`drain_sorted`]: BinaryHeap::drain_sorted
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
#[derive(Debug)]
-pub struct DrainSorted<'a, T: Ord> {
- inner: &'a mut BinaryHeap<T>,
+pub struct DrainSorted<
+ 'a,
+ T: Ord,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+ inner: &'a mut BinaryHeap<T, A>,
+}
+
+impl<'a, T: Ord, A: Allocator> DrainSorted<'a, T, A> {
+ /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ pub fn allocator(&self) -> &A {
+ self.inner.allocator()
+ }
}
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
-impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
+impl<'a, T: Ord, A: Allocator> Drop for DrainSorted<'a, T, A> {
/// Removes heap elements in heap order.
fn drop(&mut self) {
- struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);
+ struct DropGuard<'r, 'a, T: Ord, A: Allocator>(&'r mut DrainSorted<'a, T, A>);
- impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
+ impl<'r, 'a, T: Ord, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
fn drop(&mut self) {
while self.0.inner.pop().is_some() {}
}
@@ -1612,7 +1724,7 @@ impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
}
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
-impl<T: Ord> Iterator for DrainSorted<'_, T> {
+impl<T: Ord, A: Allocator> Iterator for DrainSorted<'_, T, A> {
type Item = T;
#[inline]
@@ -1628,20 +1740,20 @@ impl<T: Ord> Iterator for DrainSorted<'_, T> {
}
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
-impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> {}
+impl<T: Ord, A: Allocator> ExactSizeIterator for DrainSorted<'_, T, A> {}
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
-impl<T: Ord> FusedIterator for DrainSorted<'_, T> {}
+impl<T: Ord, A: Allocator> FusedIterator for DrainSorted<'_, T, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
-unsafe impl<T: Ord> TrustedLen for DrainSorted<'_, T> {}
+unsafe impl<T: Ord, A: Allocator> TrustedLen for DrainSorted<'_, T, A> {}
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
-impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
+impl<T: Ord, A: Allocator> From<Vec<T, A>> for BinaryHeap<T, A> {
/// Converts a `Vec<T>` into a `BinaryHeap<T>`.
///
/// This conversion happens in-place, and has *O*(*n*) time complexity.
- fn from(vec: Vec<T>) -> BinaryHeap<T> {
+ fn from(vec: Vec<T, A>) -> BinaryHeap<T, A> {
let mut heap = BinaryHeap { data: vec };
heap.rebuild();
heap
@@ -1665,12 +1777,12 @@ impl<T: Ord, const N: usize> From<[T; N]> for BinaryHeap<T> {
}
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
-impl<T> From<BinaryHeap<T>> for Vec<T> {
+impl<T, A: Allocator> From<BinaryHeap<T, A>> for Vec<T, A> {
/// Converts a `BinaryHeap<T>` into a `Vec<T>`.
///
/// This conversion requires no data movement or allocation, and has
/// constant time complexity.
- fn from(heap: BinaryHeap<T>) -> Vec<T> {
+ fn from(heap: BinaryHeap<T, A>) -> Vec<T, A> {
heap.data
}
}
@@ -1683,9 +1795,9 @@ impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T> IntoIterator for BinaryHeap<T> {
+impl<T, A: Allocator> IntoIterator for BinaryHeap<T, A> {
type Item = T;
- type IntoIter = IntoIter<T>;
+ type IntoIter = IntoIter<T, A>;
/// Creates a consuming iterator, that is, one that moves each value out of
/// the binary heap in arbitrary order. The binary heap cannot be used
@@ -1705,13 +1817,13 @@ impl<T> IntoIterator for BinaryHeap<T> {
/// println!("{x}");
/// }
/// ```
- fn into_iter(self) -> IntoIter<T> {
+ fn into_iter(self) -> IntoIter<T, A> {
IntoIter { iter: self.data.into_iter() }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> IntoIterator for &'a BinaryHeap<T> {
+impl<'a, T, A: Allocator> IntoIterator for &'a BinaryHeap<T, A> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
@@ -1721,7 +1833,7 @@ impl<'a, T> IntoIterator for &'a BinaryHeap<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Extend<T> for BinaryHeap<T> {
+impl<T: Ord, A: Allocator> Extend<T> for BinaryHeap<T, A> {
#[inline]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
let guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
@@ -1740,7 +1852,7 @@ impl<T: Ord> Extend<T> for BinaryHeap<T> {
}
#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
+impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BinaryHeap<T, A> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
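
A nightly-only sketch combining the new constructors with the rest of the now allocator-generic API, under the same `allocator_api` feature the doc examples above assume:

    #![feature(allocator_api)]
    use std::alloc::System;
    use std::collections::BinaryHeap;

    fn main() {
        // The heap, and the Vec it converts into, both carry the allocator.
        let mut heap = BinaryHeap::new_in(System);
        heap.extend([3, 1, 4, 1, 5]);
        assert_eq!(heap.peek(), Some(&5));
        let sorted = heap.into_sorted_vec(); // Vec<i32, System>
        assert_eq!(sorted, [1, 1, 3, 4, 5]);
    }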
diff --git a/library/alloc/src/collections/binary_heap/tests.rs b/library/alloc/src/collections/binary_heap/tests.rs
index 500caa356..565a7b797 100644
--- a/library/alloc/src/collections/binary_heap/tests.rs
+++ b/library/alloc/src/collections/binary_heap/tests.rs
@@ -309,6 +309,7 @@ fn test_drain_sorted() {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_drain_sorted_leak() {
let d0 = CrashTestDummy::new(0);
let d1 = CrashTestDummy::new(1);
@@ -475,6 +476,7 @@ fn test_retain() {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_retain_catch_unwind() {
let mut heap = BinaryHeap::from(vec![3, 1, 2]);
@@ -502,6 +504,7 @@ fn test_retain_catch_unwind() {
// FIXME: re-enable emscripten once it can unwind again
#[test]
#[cfg(not(target_os = "emscripten"))]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn panic_safe() {
use rand::seq::SliceRandom;
use std::cmp;
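
The attribute added throughout these tests is plain `cfg_attr`; a minimal sketch of the same gating in an arbitrary test suite:

    // Runs only on targets where panics unwind; reported as ignored
    // elsewhere (e.g. panic=abort) instead of aborting the test run.
    #[test]
    #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
    fn panics_can_be_caught() {
        assert!(std::panic::catch_unwind(|| panic!("boom")).is_err());
    }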
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 1f8a1ecba..ff908ec12 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -1132,7 +1132,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
K: Ord,
F: FnMut(&K, &mut V) -> bool,
{
- self.drain_filter(|k, v| !f(k, v));
+ self.extract_if(|k, v| !f(k, v)).for_each(drop);
}
/// Moves all elements from `other` into `self`, leaving `other` empty.
@@ -1395,40 +1395,37 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// The iterator also lets you mutate the value of each element in the
/// closure, regardless of whether you choose to keep or remove it.
///
- /// If the iterator is only partially consumed or not consumed at all, each
- /// of the remaining elements is still subjected to the closure, which may
- /// change its value and, by returning `true`, have the element removed and
- /// dropped.
+ /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+ /// or the iteration short-circuits, then the remaining elements will be retained.
+ /// Use [`retain`] with a negated predicate if you do not need the returned iterator.
///
- /// It is unspecified how many more elements will be subjected to the
- /// closure if a panic occurs in the closure, or a panic occurs while
- /// dropping an element, or if the `DrainFilter` value is leaked.
+ /// [`retain`]: BTreeMap::retain
///
/// # Examples
///
/// Splitting a map into even and odd keys, reusing the original map:
///
/// ```
- /// #![feature(btree_drain_filter)]
+ /// #![feature(btree_extract_if)]
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
- /// let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+ /// let evens: BTreeMap<_, _> = map.extract_if(|k, _v| k % 2 == 0).collect();
/// let odds = map;
/// assert_eq!(evens.keys().copied().collect::<Vec<_>>(), [0, 2, 4, 6]);
/// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), [1, 3, 5, 7]);
/// ```
- #[unstable(feature = "btree_drain_filter", issue = "70530")]
- pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F, A>
+ #[unstable(feature = "btree_extract_if", issue = "70530")]
+ pub fn extract_if<F>(&mut self, pred: F) -> ExtractIf<'_, K, V, F, A>
where
K: Ord,
F: FnMut(&K, &mut V) -> bool,
{
- let (inner, alloc) = self.drain_filter_inner();
- DrainFilter { pred, inner, alloc }
+ let (inner, alloc) = self.extract_if_inner();
+ ExtractIf { pred, inner, alloc }
}
- pub(super) fn drain_filter_inner(&mut self) -> (DrainFilterInner<'_, K, V>, A)
+ pub(super) fn extract_if_inner(&mut self) -> (ExtractIfInner<'_, K, V>, A)
where
K: Ord,
{
@@ -1436,7 +1433,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
let (root, dormant_root) = DormantMutRef::new(root);
let front = root.borrow_mut().first_leaf_edge();
(
- DrainFilterInner {
+ ExtractIfInner {
length: &mut self.length,
dormant_root: Some(dormant_root),
cur_leaf_edge: Some(front),
@@ -1445,7 +1442,7 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
)
} else {
(
- DrainFilterInner {
+ ExtractIfInner {
length: &mut self.length,
dormant_root: None,
cur_leaf_edge: None,
@@ -1899,9 +1896,10 @@ impl<K, V> Default for Values<'_, K, V> {
}
}
-/// An iterator produced by calling `drain_filter` on BTreeMap.
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-pub struct DrainFilter<
+/// An iterator produced by calling `extract_if` on BTreeMap.
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct ExtractIf<
'a,
K,
V,
@@ -1911,13 +1909,13 @@ pub struct DrainFilter<
F: 'a + FnMut(&K, &mut V) -> bool,
{
pred: F,
- inner: DrainFilterInner<'a, K, V>,
+ inner: ExtractIfInner<'a, K, V>,
/// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`.
alloc: A,
}
-/// Most of the implementation of DrainFilter are generic over the type
-/// of the predicate, thus also serving for BTreeSet::DrainFilter.
-pub(super) struct DrainFilterInner<'a, K, V> {
+/// Most of the implementation of ExtractIf are generic over the type
+/// of the predicate, thus also serving for BTreeSet::ExtractIf.
+pub(super) struct ExtractIfInner<'a, K, V> {
/// Reference to the length field in the borrowed map, updated live.
length: &'a mut usize,
/// Buried reference to the root field in the borrowed map.
@@ -1929,30 +1927,20 @@ pub(super) struct DrainFilterInner<'a, K, V> {
cur_leaf_edge: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
}
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<K, V, F, A: Allocator + Clone> Drop for DrainFilter<'_, K, V, F, A>
-where
- F: FnMut(&K, &mut V) -> bool,
-{
- fn drop(&mut self) {
- self.for_each(drop);
- }
-}
-
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<K, V, F> fmt::Debug for DrainFilter<'_, K, V, F>
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+impl<K, V, F> fmt::Debug for ExtractIf<'_, K, V, F>
where
K: fmt::Debug,
V: fmt::Debug,
F: FnMut(&K, &mut V) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish()
+ f.debug_tuple("ExtractIf").field(&self.inner.peek()).finish()
}
}
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<K, V, F, A: Allocator + Clone> Iterator for DrainFilter<'_, K, V, F, A>
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+impl<K, V, F, A: Allocator + Clone> Iterator for ExtractIf<'_, K, V, F, A>
where
F: FnMut(&K, &mut V) -> bool,
{
@@ -1967,14 +1955,14 @@ where
}
}
-impl<'a, K, V> DrainFilterInner<'a, K, V> {
+impl<'a, K, V> ExtractIfInner<'a, K, V> {
/// Allow Debug implementations to predict the next element.
pub(super) fn peek(&self) -> Option<(&K, &V)> {
let edge = self.cur_leaf_edge.as_ref()?;
edge.reborrow().next_kv().ok().map(Handle::into_kv)
}
- /// Implementation of a typical `DrainFilter::next` method, given the predicate.
+ /// Implementation of a typical `ExtractIf::next` method, given the predicate.
pub(super) fn next<F, A: Allocator + Clone>(&mut self, pred: &mut F, alloc: A) -> Option<(K, V)>
where
F: FnMut(&K, &mut V) -> bool,
@@ -2001,7 +1989,7 @@ impl<'a, K, V> DrainFilterInner<'a, K, V> {
None
}
- /// Implementation of a typical `DrainFilter::size_hint` method.
+ /// Implementation of a typical `ExtractIf::size_hint` method.
pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
// In most of the btree iterators, `self.length` is the number of elements
// yet to be visited. Here, it includes elements that were visited and that
@@ -2011,8 +1999,8 @@ impl<'a, K, V> DrainFilterInner<'a, K, V> {
}
}
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+impl<K, V, F> FusedIterator for ExtractIf<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> Iterator for Range<'a, K, V> {
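
A nightly sketch of the semantic change documented above: with `extract_if`, elements the iterator never reaches are retained, where the old `DrainFilter` would have removed them in its `Drop`:

    #![feature(btree_extract_if)]
    use std::collections::BTreeMap;

    fn main() {
        let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
        // Stop after two matches; the iterator is dropped part-way through.
        let taken: Vec<_> = map.extract_if(|k, _| k % 2 == 0).take(2).collect();
        assert_eq!(taken, [(0, 0), (2, 2)]);
        // Keys 4 and 6 also match the predicate but were never reached,
        // so they stay in the map (drain_filter would have removed them).
        assert_eq!(map.len(), 6);
    }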
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index 7ecffe3ee..8681cfcd6 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -941,13 +941,13 @@ fn test_retain() {
assert_eq!(map[&6], 60);
}
-mod test_drain_filter {
+mod test_extract_if {
use super::*;
#[test]
fn empty() {
let mut map: BTreeMap<i32, i32> = BTreeMap::new();
- map.drain_filter(|_, _| unreachable!("there's nothing to decide on"));
+ map.extract_if(|_, _| unreachable!("there's nothing to decide on")).for_each(drop);
assert_eq!(map.height(), None);
map.check();
}
@@ -957,7 +957,7 @@ mod test_drain_filter {
fn consumed_keeping_all() {
let pairs = (0..3).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- assert!(map.drain_filter(|_, _| false).eq(iter::empty()));
+ assert!(map.extract_if(|_, _| false).eq(iter::empty()));
map.check();
}
@@ -966,7 +966,7 @@ mod test_drain_filter {
fn consumed_removing_all() {
let pairs = (0..3).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs.clone());
- assert!(map.drain_filter(|_, _| true).eq(pairs));
+ assert!(map.extract_if(|_, _| true).eq(pairs));
assert!(map.is_empty());
map.check();
}
@@ -977,7 +977,7 @@ mod test_drain_filter {
let pairs = (0..3).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
assert!(
- map.drain_filter(|_, v| {
+ map.extract_if(|_, v| {
*v += 6;
false
})
@@ -994,7 +994,7 @@ mod test_drain_filter {
let pairs = (0..3).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
assert!(
- map.drain_filter(|_, v| {
+ map.extract_if(|_, v| {
*v += 6;
true
})
@@ -1008,7 +1008,7 @@ mod test_drain_filter {
fn underfull_keeping_all() {
let pairs = (0..3).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- map.drain_filter(|_, _| false);
+ map.extract_if(|_, _| false).for_each(drop);
assert!(map.keys().copied().eq(0..3));
map.check();
}
@@ -1018,7 +1018,7 @@ mod test_drain_filter {
let pairs = (0..3).map(|i| (i, i));
for doomed in 0..3 {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i == doomed);
+ map.extract_if(|i, _| *i == doomed).for_each(drop);
assert_eq!(map.len(), 2);
map.check();
}
@@ -1029,7 +1029,7 @@ mod test_drain_filter {
let pairs = (0..3).map(|i| (i, i));
for sacred in 0..3 {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i != sacred);
+ map.extract_if(|i, _| *i != sacred).for_each(drop);
assert!(map.keys().copied().eq(sacred..=sacred));
map.check();
}
@@ -1039,7 +1039,7 @@ mod test_drain_filter {
fn underfull_removing_all() {
let pairs = (0..3).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- map.drain_filter(|_, _| true);
+ map.extract_if(|_, _| true).for_each(drop);
assert!(map.is_empty());
map.check();
}
@@ -1048,7 +1048,7 @@ mod test_drain_filter {
fn height_0_keeping_all() {
let pairs = (0..node::CAPACITY).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- map.drain_filter(|_, _| false);
+ map.extract_if(|_, _| false).for_each(drop);
assert!(map.keys().copied().eq(0..node::CAPACITY));
map.check();
}
@@ -1058,7 +1058,7 @@ mod test_drain_filter {
let pairs = (0..node::CAPACITY).map(|i| (i, i));
for doomed in 0..node::CAPACITY {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i == doomed);
+ map.extract_if(|i, _| *i == doomed).for_each(drop);
assert_eq!(map.len(), node::CAPACITY - 1);
map.check();
}
@@ -1069,7 +1069,7 @@ mod test_drain_filter {
let pairs = (0..node::CAPACITY).map(|i| (i, i));
for sacred in 0..node::CAPACITY {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i != sacred);
+ map.extract_if(|i, _| *i != sacred).for_each(drop);
assert!(map.keys().copied().eq(sacred..=sacred));
map.check();
}
@@ -1079,7 +1079,7 @@ mod test_drain_filter {
fn height_0_removing_all() {
let pairs = (0..node::CAPACITY).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- map.drain_filter(|_, _| true);
+ map.extract_if(|_, _| true).for_each(drop);
assert!(map.is_empty());
map.check();
}
@@ -1087,7 +1087,7 @@ mod test_drain_filter {
#[test]
fn height_0_keeping_half() {
let mut map = BTreeMap::from_iter((0..16).map(|i| (i, i)));
- assert_eq!(map.drain_filter(|i, _| *i % 2 == 0).count(), 8);
+ assert_eq!(map.extract_if(|i, _| *i % 2 == 0).count(), 8);
assert_eq!(map.len(), 8);
map.check();
}
@@ -1096,7 +1096,7 @@ mod test_drain_filter {
fn height_1_removing_all() {
let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- map.drain_filter(|_, _| true);
+ map.extract_if(|_, _| true).for_each(drop);
assert!(map.is_empty());
map.check();
}
@@ -1106,7 +1106,7 @@ mod test_drain_filter {
let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
for doomed in 0..MIN_INSERTS_HEIGHT_1 {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i == doomed);
+ map.extract_if(|i, _| *i == doomed).for_each(drop);
assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1 - 1);
map.check();
}
@@ -1117,7 +1117,7 @@ mod test_drain_filter {
let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
for sacred in 0..MIN_INSERTS_HEIGHT_1 {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i != sacred);
+ map.extract_if(|i, _| *i != sacred).for_each(drop);
assert!(map.keys().copied().eq(sacred..=sacred));
map.check();
}
@@ -1128,7 +1128,7 @@ mod test_drain_filter {
let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
for doomed in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i == doomed);
+ map.extract_if(|i, _| *i == doomed).for_each(drop);
assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
map.check();
}
@@ -1139,7 +1139,7 @@ mod test_drain_filter {
let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
for sacred in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
let mut map = BTreeMap::from_iter(pairs.clone());
- map.drain_filter(|i, _| *i != sacred);
+ map.extract_if(|i, _| *i != sacred).for_each(drop);
assert!(map.keys().copied().eq(sacred..=sacred));
map.check();
}
@@ -1149,12 +1149,13 @@ mod test_drain_filter {
fn height_2_removing_all() {
let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
let mut map = BTreeMap::from_iter(pairs);
- map.drain_filter(|_, _| true);
+ map.extract_if(|_, _| true).for_each(drop);
assert!(map.is_empty());
map.check();
}
#[test]
+ #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn drop_panic_leak() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
@@ -1164,7 +1165,8 @@ mod test_drain_filter {
map.insert(b.spawn(Panic::InDrop), ());
map.insert(c.spawn(Panic::Never), ());
- catch_unwind(move || drop(map.drain_filter(|dummy, _| dummy.query(true)))).unwrap_err();
+ catch_unwind(move || map.extract_if(|dummy, _| dummy.query(true)).for_each(drop))
+ .unwrap_err();
assert_eq!(a.queried(), 1);
assert_eq!(b.queried(), 1);
@@ -1175,6 +1177,7 @@ mod test_drain_filter {
}
#[test]
+ #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn pred_panic_leak() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
@@ -1184,8 +1187,10 @@ mod test_drain_filter {
map.insert(b.spawn(Panic::InQuery), ());
map.insert(c.spawn(Panic::InQuery), ());
- catch_unwind(AssertUnwindSafe(|| drop(map.drain_filter(|dummy, _| dummy.query(true)))))
- .unwrap_err();
+ catch_unwind(AssertUnwindSafe(|| {
+ map.extract_if(|dummy, _| dummy.query(true)).for_each(drop)
+ }))
+ .unwrap_err();
assert_eq!(a.queried(), 1);
assert_eq!(b.queried(), 1);
@@ -1201,6 +1206,7 @@ mod test_drain_filter {
// Same as above, but attempt to use the iterator again after the panic in the predicate
#[test]
+ #[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn pred_panic_reuse() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
@@ -1211,7 +1217,7 @@ mod test_drain_filter {
map.insert(c.spawn(Panic::InQuery), ());
{
- let mut it = map.drain_filter(|dummy, _| dummy.query(true));
+ let mut it = map.extract_if(|dummy, _| dummy.query(true));
catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err();
// Iterator behaviour after a panic is explicitly unspecified,
// so this is just the current implementation:
@@ -1449,6 +1455,7 @@ fn test_clear() {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_clear_drop_panic_leak() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
@@ -1540,11 +1547,13 @@ fn test_clone_panic_leak(size: usize) {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_clone_panic_leak_height_0() {
test_clone_panic_leak(3)
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_clone_panic_leak_height_1() {
test_clone_panic_leak(MIN_INSERTS_HEIGHT_1)
}
@@ -1651,8 +1660,8 @@ fn assert_sync() {
v.into_values()
}
- fn drain_filter<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
- v.drain_filter(|_, _| false)
+ fn extract_if<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ v.extract_if(|_, _| false)
}
fn iter<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
@@ -1720,8 +1729,8 @@ fn assert_send() {
v.into_values()
}
- fn drain_filter<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
- v.drain_filter(|_, _| false)
+ fn extract_if<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ v.extract_if(|_, _| false)
}
fn iter<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
@@ -2099,6 +2108,7 @@ create_append_test!(test_append_239, 239);
create_append_test!(test_append_1700, 1700);
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_append_drop_leak() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
@@ -2240,6 +2250,7 @@ fn test_split_off_large_random_sorted() {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_into_iter_drop_leak_height_0() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
@@ -2263,6 +2274,7 @@ fn test_into_iter_drop_leak_height_0() {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_into_iter_drop_leak_height_1() {
let size = MIN_INSERTS_HEIGHT_1;
for panic_point in vec![0, 1, size - 2, size - 1] {
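
The `catch_unwind(AssertUnwindSafe(..))` shape used in these tests, in isolation; `AssertUnwindSafe` is needed because the closures borrow the collection mutably:

    use std::panic::{catch_unwind, AssertUnwindSafe};

    fn main() {
        let mut v = vec![1, 2, 3];
        // &mut borrows are not UnwindSafe; AssertUnwindSafe opts in manually,
        // which is fine here since the state is inspected afterwards on purpose.
        let result = catch_unwind(AssertUnwindSafe(|| {
            v.push(4);
            panic!("boom");
        }));
        assert!(result.is_err());
        assert_eq!(v, [1, 2, 3, 4]);
    }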
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index 940fa30af..c4461040b 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -999,7 +999,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
T: Ord,
F: FnMut(&T) -> bool,
{
- self.drain_filter(|v| !f(v));
+ self.extract_if(|v| !f(v)).for_each(drop);
}
/// Moves all elements from `other` into `self`, leaving `other` empty.
@@ -1084,36 +1084,33 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// yielded. If the closure returns `false`, or panics, the element remains
/// in the set and will not be yielded.
///
- /// If the iterator is only partially consumed or not consumed at all, each
- /// of the remaining elements is still subjected to the closure and removed
- /// and dropped if it returns `true`.
- ///
- /// It is unspecified how many more elements will be subjected to the
- /// closure if a panic occurs in the closure, or if a panic occurs while
- /// dropping an element, or if the `DrainFilter` itself is leaked.
+ /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+ /// or the iteration short-circuits, then the remaining elements will be retained.
+ /// Use [`retain`] with a negated predicate if you do not need the returned iterator.
///
+ /// [`retain`]: BTreeSet::retain
/// # Examples
///
/// Splitting a set into even and odd values, reusing the original set:
///
/// ```
- /// #![feature(btree_drain_filter)]
+ /// #![feature(btree_extract_if)]
/// use std::collections::BTreeSet;
///
/// let mut set: BTreeSet<i32> = (0..8).collect();
- /// let evens: BTreeSet<_> = set.drain_filter(|v| v % 2 == 0).collect();
+ /// let evens: BTreeSet<_> = set.extract_if(|v| v % 2 == 0).collect();
/// let odds = set;
/// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
/// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
/// ```
- #[unstable(feature = "btree_drain_filter", issue = "70530")]
- pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F, A>
+ #[unstable(feature = "btree_extract_if", issue = "70530")]
+ pub fn extract_if<'a, F>(&'a mut self, pred: F) -> ExtractIf<'a, T, F, A>
where
T: Ord,
F: 'a + FnMut(&T) -> bool,
{
- let (inner, alloc) = self.map.drain_filter_inner();
- DrainFilter { pred, inner, alloc }
+ let (inner, alloc) = self.map.extract_if_inner();
+ ExtractIf { pred, inner, alloc }
}
/// Gets an iterator that visits the elements in the `BTreeSet` in ascending
@@ -1275,9 +1272,10 @@ impl<'a, T, A: Allocator + Clone> IntoIterator for &'a BTreeSet<T, A> {
}
}
-/// An iterator produced by calling `drain_filter` on BTreeSet.
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-pub struct DrainFilter<
+/// An iterator produced by calling `extract_if` on BTreeSet.
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct ExtractIf<
'a,
T,
F,
@@ -1287,34 +1285,24 @@ pub struct DrainFilter<
F: 'a + FnMut(&T) -> bool,
{
pred: F,
- inner: super::map::DrainFilterInner<'a, T, SetValZST>,
+ inner: super::map::ExtractIfInner<'a, T, SetValZST>,
/// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`.
alloc: A,
}
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<T, F, A: Allocator + Clone> Drop for DrainFilter<'_, T, F, A>
-where
- F: FnMut(&T) -> bool,
-{
- fn drop(&mut self) {
- self.for_each(drop);
- }
-}
-
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<T, F, A: Allocator + Clone> fmt::Debug for DrainFilter<'_, T, F, A>
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+impl<T, F, A: Allocator + Clone> fmt::Debug for ExtractIf<'_, T, F, A>
where
T: fmt::Debug,
F: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish()
+ f.debug_tuple("ExtractIf").field(&self.inner.peek().map(|(k, _)| k)).finish()
}
}
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<'a, T, F, A: Allocator + Clone> Iterator for DrainFilter<'_, T, F, A>
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+impl<'a, T, F, A: Allocator + Clone> Iterator for ExtractIf<'_, T, F, A>
where
F: 'a + FnMut(&T) -> bool,
{
@@ -1331,11 +1319,8 @@ where
}
}
-#[unstable(feature = "btree_drain_filter", issue = "70530")]
-impl<T, F, A: Allocator + Clone> FusedIterator for DrainFilter<'_, T, F, A> where
- F: FnMut(&T) -> bool
-{
-}
+#[unstable(feature = "btree_extract_if", issue = "70530")]
+impl<T, F, A: Allocator + Clone> FusedIterator for ExtractIf<'_, T, F, A> where F: FnMut(&T) -> bool {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator + Clone> Extend<T> for BTreeSet<T, A> {
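
Per the reworded docs, `retain` with a negated predicate now covers the drop-the-iterator use case; a nightly sketch of the equivalence:

    #![feature(btree_extract_if)]
    use std::collections::BTreeSet;

    fn main() {
        let mut a: BTreeSet<i32> = (0..8).collect();
        let mut b = a.clone();
        // Equivalent ways to delete the even values when the removed
        // elements themselves are not needed:
        a.extract_if(|v| v % 2 == 0).for_each(drop);
        b.retain(|v| v % 2 != 0);
        assert_eq!(a, b);
    }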
diff --git a/library/alloc/src/collections/btree/set/tests.rs b/library/alloc/src/collections/btree/set/tests.rs
index a7c839d77..e05bf0e20 100644
--- a/library/alloc/src/collections/btree/set/tests.rs
+++ b/library/alloc/src/collections/btree/set/tests.rs
@@ -366,18 +366,19 @@ fn test_retain() {
}
#[test]
-fn test_drain_filter() {
+fn test_extract_if() {
let mut x = BTreeSet::from([1]);
let mut y = BTreeSet::from([1]);
- x.drain_filter(|_| true);
- y.drain_filter(|_| false);
+ x.extract_if(|_| true).for_each(drop);
+ y.extract_if(|_| false).for_each(drop);
assert_eq!(x.len(), 0);
assert_eq!(y.len(), 1);
}
#[test]
-fn test_drain_filter_drop_panic_leak() {
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn test_extract_if_drop_panic_leak() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
let c = CrashTestDummy::new(2);
@@ -386,7 +387,7 @@ fn test_drain_filter_drop_panic_leak() {
set.insert(b.spawn(Panic::InDrop));
set.insert(c.spawn(Panic::Never));
- catch_unwind(move || drop(set.drain_filter(|dummy| dummy.query(true)))).ok();
+ catch_unwind(move || set.extract_if(|dummy| dummy.query(true)).for_each(drop)).ok();
assert_eq!(a.queried(), 1);
assert_eq!(b.queried(), 1);
@@ -397,7 +398,8 @@ fn test_drain_filter_drop_panic_leak() {
}
#[test]
-fn test_drain_filter_pred_panic_leak() {
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn test_extract_if_pred_panic_leak() {
let a = CrashTestDummy::new(0);
let b = CrashTestDummy::new(1);
let c = CrashTestDummy::new(2);
@@ -406,7 +408,8 @@ fn test_drain_filter_pred_panic_leak() {
set.insert(b.spawn(Panic::InQuery));
set.insert(c.spawn(Panic::InQuery));
- catch_unwind(AssertUnwindSafe(|| drop(set.drain_filter(|dummy| dummy.query(true))))).ok();
+ catch_unwind(AssertUnwindSafe(|| set.extract_if(|dummy| dummy.query(true)).for_each(drop)))
+ .ok();
assert_eq!(a.queried(), 1);
assert_eq!(b.queried(), 1);
@@ -603,8 +606,8 @@ fn assert_sync() {
v.range(..)
}
- fn drain_filter<T: Sync + Ord>(v: &mut BTreeSet<T>) -> impl Sync + '_ {
- v.drain_filter(|_| false)
+ fn extract_if<T: Sync + Ord>(v: &mut BTreeSet<T>) -> impl Sync + '_ {
+ v.extract_if(|_| false)
}
fn difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
@@ -642,8 +645,8 @@ fn assert_send() {
v.range(..)
}
- fn drain_filter<T: Send + Ord>(v: &mut BTreeSet<T>) -> impl Send + '_ {
- v.drain_filter(|_| false)
+ fn extract_if<T: Send + Ord>(v: &mut BTreeSet<T>) -> impl Send + '_ {
+ v.extract_if(|_| false)
}
fn difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs
index 4cd34ac2f..052edf453 100644
--- a/library/alloc/src/collections/linked_list.rs
+++ b/library/alloc/src/collections/linked_list.rs
@@ -1030,7 +1030,11 @@ impl<T, A: Allocator> LinkedList<T, A> {
/// If the closure returns false, the element will remain in the list and will not be yielded
/// by the iterator.
///
- /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
+ /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+ /// or the iteration short-circuits, then the remaining elements will be retained.
+ /// Use `extract_if().for_each(drop)` if you do not need the returned iterator.
+ ///
+ /// Note that `extract_if` lets you mutate every element in the filter closure, regardless of
/// whether you choose to keep or remove it.
///
/// # Examples
@@ -1038,20 +1042,20 @@ impl<T, A: Allocator> LinkedList<T, A> {
/// Splitting a list into evens and odds, reusing the original list:
///
/// ```
- /// #![feature(drain_filter)]
+ /// #![feature(extract_if)]
/// use std::collections::LinkedList;
///
/// let mut numbers: LinkedList<u32> = LinkedList::new();
/// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
///
- /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
+ /// let evens = numbers.extract_if(|x| *x % 2 == 0).collect::<LinkedList<_>>();
/// let odds = numbers;
///
/// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
/// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
/// ```
- #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
- pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
+ #[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+ pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
@@ -1059,7 +1063,7 @@ impl<T, A: Allocator> LinkedList<T, A> {
let it = self.head;
let old_len = self.len;
- DrainFilter { list: self, it, pred: filter, idx: 0, old_len }
+ ExtractIf { list: self, it, pred: filter, idx: 0, old_len }
}
}
@@ -1803,9 +1807,10 @@ impl<'a, T, A: Allocator> CursorMut<'a, T, A> {
}
}
-/// An iterator produced by calling `drain_filter` on LinkedList.
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-pub struct DrainFilter<
+/// An iterator produced by calling `extract_if` on LinkedList.
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct ExtractIf<
'a,
T: 'a,
F: 'a,
@@ -1820,8 +1825,8 @@ pub struct DrainFilter<
old_len: usize,
}
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
@@ -1849,40 +1854,13 @@ where
}
}
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- fn drop(&mut self) {
- struct DropGuard<'r, 'a, T, F, A: Allocator>(&'r mut DrainFilter<'a, T, F, A>)
- where
- F: FnMut(&mut T) -> bool;
-
- impl<'r, 'a, T, F, A: Allocator> Drop for DropGuard<'r, 'a, T, F, A>
- where
- F: FnMut(&mut T) -> bool,
- {
- fn drop(&mut self) {
- self.0.for_each(drop);
- }
- }
-
- while let Some(item) = self.next() {
- let guard = DropGuard(self);
- drop(item);
- mem::forget(guard);
- }
- }
-}
-
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+impl<T: fmt::Debug, F> fmt::Debug for ExtractIf<'_, T, F>
where
F: FnMut(&mut T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("DrainFilter").field(&self.list).finish()
+ f.debug_tuple("ExtractIf").field(&self.list).finish()
}
}
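
A nightly sketch of the mutate-then-decide behaviour called out in the doc comment above (the values are made up for illustration):

    #![feature(extract_if)]
    use std::collections::LinkedList;

    fn main() {
        let mut list: LinkedList<u32> = (1..=6).collect();
        // The closure receives &mut T, so kept elements can be edited too.
        let removed: Vec<u32> = list
            .extract_if(|x| {
                *x *= 10;     // every visited element is rewritten...
                *x % 20 == 0  // ...and the multiples of 20 are extracted
            })
            .collect();
        assert_eq!(removed, [20, 40, 60]);
        assert_eq!(list.into_iter().collect::<Vec<_>>(), [10, 30, 50]);
    }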
diff --git a/library/alloc/src/collections/linked_list/tests.rs b/library/alloc/src/collections/linked_list/tests.rs
index 04594d55b..8dcd59d12 100644
--- a/library/alloc/src/collections/linked_list/tests.rs
+++ b/library/alloc/src/collections/linked_list/tests.rs
@@ -540,10 +540,10 @@ fn test_show() {
}
#[test]
-fn drain_filter_test() {
+fn extract_if_test() {
let mut m: LinkedList<u32> = LinkedList::new();
m.extend(&[1, 2, 3, 4, 5, 6]);
- let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();
+ let deleted = m.extract_if(|v| *v < 4).collect::<Vec<_>>();
check_links(&m);
@@ -555,7 +555,7 @@ fn drain_filter_test() {
fn drain_to_empty_test() {
let mut m: LinkedList<u32> = LinkedList::new();
m.extend(&[1, 2, 3, 4, 5, 6]);
- let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();
+ let deleted = m.extract_if(|_| true).collect::<Vec<_>>();
check_links(&m);
@@ -811,11 +811,11 @@ fn test_contains() {
}
#[test]
-fn drain_filter_empty() {
+fn extract_if_empty() {
let mut list: LinkedList<i32> = LinkedList::new();
{
- let mut iter = list.drain_filter(|_| true);
+ let mut iter = list.extract_if(|_| true);
assert_eq!(iter.size_hint(), (0, Some(0)));
assert_eq!(iter.next(), None);
assert_eq!(iter.size_hint(), (0, Some(0)));
@@ -828,13 +828,13 @@ fn drain_filter_empty() {
}
#[test]
-fn drain_filter_zst() {
+fn extract_if_zst() {
let mut list: LinkedList<_> = [(), (), (), (), ()].into_iter().collect();
let initial_len = list.len();
let mut count = 0;
{
- let mut iter = list.drain_filter(|_| true);
+ let mut iter = list.extract_if(|_| true);
assert_eq!(iter.size_hint(), (0, Some(initial_len)));
while let Some(_) = iter.next() {
count += 1;
@@ -851,14 +851,14 @@ fn drain_filter_zst() {
}
#[test]
-fn drain_filter_false() {
+fn extract_if_false() {
let mut list: LinkedList<_> = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();
let initial_len = list.len();
let mut count = 0;
{
- let mut iter = list.drain_filter(|_| false);
+ let mut iter = list.extract_if(|_| false);
assert_eq!(iter.size_hint(), (0, Some(initial_len)));
for _ in iter.by_ref() {
count += 1;
@@ -874,14 +874,14 @@ fn drain_filter_false() {
}
#[test]
-fn drain_filter_true() {
+fn extract_if_true() {
let mut list: LinkedList<_> = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();
let initial_len = list.len();
let mut count = 0;
{
- let mut iter = list.drain_filter(|_| true);
+ let mut iter = list.extract_if(|_| true);
assert_eq!(iter.size_hint(), (0, Some(initial_len)));
while let Some(_) = iter.next() {
count += 1;
@@ -898,7 +898,7 @@ fn drain_filter_true() {
}
#[test]
-fn drain_filter_complex() {
+fn extract_if_complex() {
{
// [+xxx++++++xxxxx++++x+x++]
let mut list = [
@@ -908,7 +908,7 @@ fn drain_filter_complex() {
.into_iter()
.collect::<LinkedList<_>>();
- let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ let removed = list.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
assert_eq!(removed.len(), 10);
assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
@@ -926,7 +926,7 @@ fn drain_filter_complex() {
.into_iter()
.collect::<LinkedList<_>>();
- let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ let removed = list.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
assert_eq!(removed.len(), 10);
assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
@@ -944,7 +944,7 @@ fn drain_filter_complex() {
.into_iter()
.collect::<LinkedList<_>>();
- let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ let removed = list.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
assert_eq!(removed.len(), 10);
assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
@@ -961,7 +961,7 @@ fn drain_filter_complex() {
.into_iter()
.collect::<LinkedList<_>>();
- let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ let removed = list.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
assert_eq!(removed.len(), 10);
assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
@@ -975,7 +975,7 @@ fn drain_filter_complex() {
.into_iter()
.collect::<LinkedList<_>>();
- let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ let removed = list.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
assert_eq!(removed.len(), 10);
assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
@@ -985,7 +985,8 @@ fn drain_filter_complex() {
}
#[test]
-fn drain_filter_drop_panic_leak() {
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn extract_if_drop_panic_leak() {
let d0 = CrashTestDummy::new(0);
let d1 = CrashTestDummy::new(1);
let d2 = CrashTestDummy::new(2);
@@ -1004,21 +1005,28 @@ fn drain_filter_drop_panic_leak() {
q.push_front(d1.spawn(Panic::InDrop));
q.push_front(d0.spawn(Panic::Never));
- catch_unwind(AssertUnwindSafe(|| drop(q.drain_filter(|_| true)))).unwrap_err();
+ catch_unwind(AssertUnwindSafe(|| q.extract_if(|_| true).for_each(drop))).unwrap_err();
assert_eq!(d0.dropped(), 1);
assert_eq!(d1.dropped(), 1);
+ assert_eq!(d2.dropped(), 0);
+ assert_eq!(d3.dropped(), 0);
+ assert_eq!(d4.dropped(), 0);
+ assert_eq!(d5.dropped(), 0);
+ assert_eq!(d6.dropped(), 0);
+ assert_eq!(d7.dropped(), 0);
+ drop(q);
assert_eq!(d2.dropped(), 1);
assert_eq!(d3.dropped(), 1);
assert_eq!(d4.dropped(), 1);
assert_eq!(d5.dropped(), 1);
assert_eq!(d6.dropped(), 1);
assert_eq!(d7.dropped(), 1);
- assert!(q.is_empty());
}
#[test]
-fn drain_filter_pred_panic_leak() {
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn extract_if_pred_panic_leak() {
static mut DROPS: i32 = 0;
#[derive(Debug)]
@@ -1043,7 +1051,7 @@ fn drain_filter_pred_panic_leak() {
q.push_front(D(0));
catch_unwind(AssertUnwindSafe(|| {
- drop(q.drain_filter(|item| if item.0 >= 2 { panic!() } else { true }))
+ q.extract_if(|item| if item.0 >= 2 { panic!() } else { true }).for_each(drop)
}))
.ok();
@@ -1124,6 +1132,7 @@ fn test_drop_clear() {
}
#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn test_drop_panic() {
static mut DROPS: i32 = 0;
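
The leak tests above share one pattern: count drops through a side channel, trigger a panic inside `catch_unwind`, and assert which elements were dropped before versus after the unwind. A self-contained sketch of that pattern (the `Counted` type is illustrative, not the tests' `CrashTestDummy`):

```rust
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::sync::atomic::{AtomicUsize, Ordering};

static DROPS: AtomicUsize = AtomicUsize::new(0);

struct Counted(u32);
impl Drop for Counted {
    fn drop(&mut self) {
        DROPS.fetch_add(1, Ordering::Relaxed);
    }
}

fn main() {
    let mut v = vec![Counted(0), Counted(1)];
    catch_unwind(AssertUnwindSafe(|| {
        v.pop(); // drops Counted(1) before the panic
        panic!("boom");
    }))
    .unwrap_err();
    assert_eq!(DROPS.load(Ordering::Relaxed), 1);
    drop(v); // the survivor is dropped with the vector
    assert_eq!(DROPS.load(Ordering::Relaxed), 2);
}
```
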
diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs
index f99395c72..62856fc9a 100644
--- a/library/alloc/src/ffi/c_str.rs
+++ b/library/alloc/src/ffi/c_str.rs
@@ -888,7 +888,7 @@ impl From<&CStr> for Arc<CStr> {
#[stable(feature = "shared_from_slice2", since = "1.24.0")]
impl From<CString> for Rc<CStr> {
/// Converts a [`CString`] into an <code>[Rc]<[CStr]></code> by moving the [`CString`]
- /// data into a new [`Arc`] buffer.
+ /// data into a new [`Rc`] buffer.
#[inline]
fn from(s: CString) -> Rc<CStr> {
let rc: Rc<[u8]> = Rc::from(s.into_inner());
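
The corrected sentence now matches what the code does; a quick stable-Rust illustration of the conversion:

```rust
use std::ffi::{CStr, CString};
use std::rc::Rc;

fn main() {
    let owned = CString::new("hello").unwrap();
    // Moves the CString's data into a new reference-counted allocation.
    let shared: Rc<CStr> = Rc::from(owned);
    assert_eq!(shared.to_str(), Ok("hello"));
}
```
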
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 59fa91c10..967ad3a0e 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -56,6 +56,11 @@
//! [`Rc`]: rc
//! [`RefCell`]: core::cell
+// To run alloc tests without x.py and without ending up with two copies of alloc, Miri needs to be
+// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
+// rustc itself never sets the feature, so this line has no effect there.
+#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
+//
#![allow(unused_attributes)]
#![stable(feature = "alloc", since = "1.36.0")]
#![doc(
@@ -75,11 +80,6 @@
))]
#![no_std]
#![needs_allocator]
-// To run alloc tests without x.py and without ending up with two copies of alloc, Miri needs to be
-// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
-// rustc itself never sets the feature, so this line has no effect there.
-#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
-//
// Lints:
#![deny(unsafe_op_in_unsafe_fn)]
#![deny(fuzzy_provenance_casts)]
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 38a711ac7..b3305b8ca 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -258,19 +258,19 @@ use core::iter;
use core::marker::{PhantomData, Unsize};
#[cfg(not(no_global_oom_handling))]
use core::mem::size_of_val;
-use core::mem::{self, align_of_val_raw, forget};
-use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
+use core::mem::{self, align_of_val_raw, forget, ManuallyDrop};
+use core::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Receiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
-use core::ptr::{self, NonNull};
+use core::ptr::{self, drop_in_place, NonNull};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;
#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
#[cfg(not(no_global_oom_handling))]
-use crate::alloc::{box_free, WriteCloneIntoRaw};
+use crate::alloc::WriteCloneIntoRaw;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
#[cfg(not(no_global_oom_handling))]
@@ -1169,7 +1169,7 @@ impl<T: ?Sized> Rc<T> {
#[inline]
#[stable(feature = "ptr_eq", since = "1.17.0")]
/// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
- /// [`ptr::eq`]. See [that function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
+ /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
///
/// # Examples
///
@@ -1184,7 +1184,7 @@ impl<T: ?Sized> Rc<T> {
/// assert!(!Rc::ptr_eq(&five, &other_five));
/// ```
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- this.ptr.as_ptr() == other.ptr.as_ptr()
+ this.ptr.as_ptr() as *const () == other.ptr.as_ptr() as *const ()
}
}
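
The new doc wording reflects a behavior change: the comparison casts both sides to thin pointers first, so `dyn Trait` vtable metadata (which may differ for the same type across codegen units) no longer affects the result. A sketch of the same comparison on raw pointers:

```rust
use std::fmt::Debug;

// Mirrors the comparison `Rc::ptr_eq` performs after this change: compare
// only the data address, discarding any wide-pointer metadata.
fn thin_eq<T: ?Sized>(a: *const T, b: *const T) -> bool {
    a as *const () == b as *const ()
}

fn main() {
    let x = 5i32;
    let a: *const dyn Debug = &x;
    let b: *const dyn Debug = &x;
    assert!(thin_eq(a, b)); // equal even if the vtable pointers were to differ
}
```
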
@@ -1442,23 +1442,21 @@ impl<T: ?Sized> Rc<T> {
}
#[cfg(not(no_global_oom_handling))]
- fn from_box(v: Box<T>) -> Rc<T> {
+ fn from_box(src: Box<T>) -> Rc<T> {
unsafe {
- let (box_unique, alloc) = Box::into_unique(v);
- let bptr = box_unique.as_ptr();
-
- let value_size = size_of_val(&*bptr);
- let ptr = Self::allocate_for_ptr(bptr);
+ let value_size = size_of_val(&*src);
+ let ptr = Self::allocate_for_ptr(&*src);
// Copy value as bytes
ptr::copy_nonoverlapping(
- bptr as *const T as *const u8,
+ &*src as *const T as *const u8,
&mut (*ptr).value as *mut _ as *mut u8,
value_size,
);
// Free the allocation without dropping its contents
- box_free(box_unique, alloc);
+ let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>);
+ drop(src);
Self::from_ptr(ptr)
}
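
The rewritten `from_box` frees the `Box` allocation without running `T`'s destructor by round-tripping the box through `ManuallyDrop<T>`, which has the same layout as `T` but no drop glue. A standalone sketch of that idiom (the function name and the `main` payload are illustrative only):

```rust
use std::mem::ManuallyDrop;

fn free_box_without_dropping<T: ?Sized>(b: Box<T>) {
    // ManuallyDrop<T> has the same layout as T but suppresses the
    // destructor, so dropping this Box only frees the allocation.
    let b = unsafe { Box::from_raw(Box::into_raw(b) as *mut ManuallyDrop<T>) };
    drop(b);
}

fn main() {
    let b = Box::new(String::from("contents are deliberately leaked"));
    // The String's heap buffer leaks; only the Box allocation is freed.
    free_box_without_dropping(b);
}
```
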
@@ -2468,8 +2466,8 @@ impl<T: ?Sized> Weak<T> {
}
/// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
- /// both don't point to any allocation (because they were created with `Weak::new()`). See [that
- /// function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
+ /// both don't point to any allocation (because they were created with `Weak::new()`). However,
+ /// this function ignores the metadata of `dyn Trait` pointers.
///
/// # Notes
///
@@ -2510,7 +2508,7 @@ impl<T: ?Sized> Weak<T> {
#[must_use]
#[stable(feature = "weak_ptr_eq", since = "1.39.0")]
pub fn ptr_eq(&self, other: &Self) -> bool {
- self.ptr.as_ptr() == other.ptr.as_ptr()
+ ptr::eq(self.ptr.as_ptr() as *const (), other.ptr.as_ptr() as *const ())
}
}
@@ -2746,3 +2744,139 @@ fn data_offset_align(align: usize) -> usize {
let layout = Layout::new::<RcBox<()>>();
layout.size() + layout.padding_needed_for(align)
}
+
+/// A uniquely owned `Rc`
+///
+/// This represents an `Rc` that is known to be uniquely owned -- that is, to have exactly one strong
+/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
+/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
+///
+/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
+/// use case is to have an object be mutable during its initialization phase but then have it become
+/// immutable and converted to a normal `Rc`.
+///
+/// This can be used as a flexible way to create cyclic data structures, as in the example below.
+///
+/// ```
+/// #![feature(unique_rc_arc)]
+/// use std::rc::{Rc, Weak, UniqueRc};
+///
+/// struct Gadget {
+/// #[allow(dead_code)]
+/// me: Weak<Gadget>,
+/// }
+///
+/// fn create_gadget() -> Option<Rc<Gadget>> {
+/// let mut rc = UniqueRc::new(Gadget {
+/// me: Weak::new(),
+/// });
+/// rc.me = UniqueRc::downgrade(&rc);
+/// Some(UniqueRc::into_rc(rc))
+/// }
+///
+/// create_gadget().unwrap();
+/// ```
+///
+/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
+/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
+/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
+/// including fallible or async constructors.
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+#[derive(Debug)]
+pub struct UniqueRc<T> {
+ ptr: NonNull<RcBox<T>>,
+ phantom: PhantomData<RcBox<T>>,
+}
+
+impl<T> UniqueRc<T> {
+ /// Creates a new `UniqueRc`
+ ///
+ /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
+ /// these weak references will fail until the `UniqueRc` has been converted into an [`Rc`].
+ /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
+ /// point to the new [`Rc`].
+ #[cfg(not(no_global_oom_handling))]
+ #[unstable(feature = "unique_rc_arc", issue = "112566")]
+ pub fn new(value: T) -> Self {
+ Self {
+ ptr: Box::leak(Box::new(RcBox {
+ strong: Cell::new(0),
+ // keep one weak reference so if all the weak pointers that are created are dropped
+ // the UniqueRc still stays valid.
+ weak: Cell::new(1),
+ value,
+ }))
+ .into(),
+ phantom: PhantomData,
+ }
+ }
+
+ /// Creates a new weak reference to the `UniqueRc`
+ ///
+ /// Attempting to upgrade this weak reference will fail until the `UniqueRc` has been converted
+ /// to an [`Rc`] using [`UniqueRc::into_rc`].
+ #[unstable(feature = "unique_rc_arc", issue = "112566")]
+ pub fn downgrade(this: &Self) -> Weak<T> {
+ // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
+ // one strong reference before converting to a regular Rc.
+ unsafe {
+ this.ptr.as_ref().inc_weak();
+ }
+ Weak { ptr: this.ptr }
+ }
+
+ /// Converts the `UniqueRc` into a regular [`Rc`]
+ ///
+ /// This consumes the `UniqueRc` and returns a regular [`Rc`] that owns the value previously
+ /// held by the `UniqueRc`.
+ ///
+ /// Any weak references created before this method is called can now be upgraded to strong
+ /// references.
+ #[unstable(feature = "unique_rc_arc", issue = "112566")]
+ pub fn into_rc(this: Self) -> Rc<T> {
+ let mut this = ManuallyDrop::new(this);
+ // SAFETY: This pointer was allocated at creation time so we know it is valid.
+ unsafe {
+ // Convert our weak reference into a strong reference
+ this.ptr.as_mut().strong.set(1);
+ Rc::from_inner(this.ptr)
+ }
+ }
+}
+
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+impl<T> Deref for UniqueRc<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ // SAFETY: This pointer was allocated at creation time so we know it is valid.
+ unsafe { &self.ptr.as_ref().value }
+ }
+}
+
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+impl<T> DerefMut for UniqueRc<T> {
+ fn deref_mut(&mut self) -> &mut T {
+ // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
+ // have unique ownership and therefore it's safe to make a mutable reference because
+ // `UniqueRc` owns the only strong reference to itself.
+ unsafe { &mut (*self.ptr.as_ptr()).value }
+ }
+}
+
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+unsafe impl<#[may_dangle] T> Drop for UniqueRc<T> {
+ fn drop(&mut self) {
+ unsafe {
+ // destroy the contained object
+ drop_in_place(DerefMut::deref_mut(self));
+
+ // remove the implicit "strong weak" pointer now that we've destroyed the contents.
+ self.ptr.as_ref().dec_weak();
+
+ if self.ptr.as_ref().weak() == 0 {
+ Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+ }
+ }
+ }
+}
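
Putting the new pieces together — `strong` starts at 0 with one implicit weak, `downgrade` hands out weaks that cannot upgrade yet, and `into_rc` flips `strong` to 1 — a nightly-only usage sketch:

```rust
#![feature(unique_rc_arc)]
use std::rc::{Rc, UniqueRc};

fn main() {
    let mut unique = UniqueRc::new(String::from("init"));
    let weak = UniqueRc::downgrade(&unique);
    assert!(weak.upgrade().is_none()); // no strong reference exists yet

    unique.push_str("ialized"); // DerefMut works while ownership is unique

    let rc = UniqueRc::into_rc(unique);
    let rc2 = weak.upgrade().unwrap(); // weaks created earlier now upgrade
    assert_eq!(*rc, "initialized");
    assert_eq!(Rc::strong_count(&rc), 2);
    drop(rc2);
}
```
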
diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs
index 2784108e0..1f221b86f 100644
--- a/library/alloc/src/rc/tests.rs
+++ b/library/alloc/src/rc/tests.rs
@@ -574,3 +574,48 @@ fn test_rc_cyclic_with_two_ref() {
assert_eq!(Rc::strong_count(&two_refs), 3);
assert_eq!(Rc::weak_count(&two_refs), 2);
}
+
+#[test]
+fn test_unique_rc_weak() {
+ let rc = UniqueRc::new(42);
+ let weak = UniqueRc::downgrade(&rc);
+ assert!(weak.upgrade().is_none());
+
+ let _rc = UniqueRc::into_rc(rc);
+ assert_eq!(*weak.upgrade().unwrap(), 42);
+}
+
+#[test]
+fn test_unique_rc_drop_weak() {
+ let rc = UniqueRc::new(42);
+ let weak = UniqueRc::downgrade(&rc);
+ mem::drop(weak);
+
+ let rc = UniqueRc::into_rc(rc);
+ assert_eq!(*rc, 42);
+}
+
+#[test]
+fn test_unique_rc_drops_contents() {
+ let mut dropped = false;
+ struct DropMe<'a>(&'a mut bool);
+ impl Drop for DropMe<'_> {
+ fn drop(&mut self) {
+ *self.0 = true;
+ }
+ }
+ {
+ let rc = UniqueRc::new(DropMe(&mut dropped));
+ drop(rc);
+ }
+ assert!(dropped);
+}
+
+#[test]
+fn test_unique_rc_weak_clone_holding_ref() {
+ let mut v = UniqueRc::new(0u8);
+ let w = UniqueRc::downgrade(&v);
+ let r = &mut *v;
+ let _ = w.clone(); // touch weak count
+ *r = 123;
+}
diff --git a/library/alloc/src/slice/tests.rs b/library/alloc/src/slice/tests.rs
index f674530aa..54bc4e77b 100644
--- a/library/alloc/src/slice/tests.rs
+++ b/library/alloc/src/slice/tests.rs
@@ -187,6 +187,7 @@ std::thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
#[test]
#[cfg_attr(target_os = "emscripten", ignore)] // no threads
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
fn panic_safe() {
panic::update_hook(move |prev, info| {
if !SILENCE_PANIC.with(|s| s.get()) {
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index 59e3f887b..ad7b77f54 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -1853,26 +1853,27 @@ impl String {
/// Consumes and leaks the `String`, returning a mutable reference to the contents,
/// `&'a mut str`.
///
- /// This is mainly useful for data that lives for the remainder of
- /// the program's life. Dropping the returned reference will cause a memory
- /// leak.
+ /// The caller has free choice over the returned lifetime, including `'static`. Indeed,
+ /// this function is ideally used for data that lives for the remainder of the program's life,
+ /// as dropping the returned reference will cause a memory leak.
///
/// It does not reallocate or shrink the `String`,
/// so the leaked allocation may include unused capacity that is not part
- /// of the returned slice.
+ /// of the returned slice. If you don't want that, call [`into_boxed_str`],
+ /// and then [`Box::leak`].
+ ///
+ /// [`into_boxed_str`]: Self::into_boxed_str
///
/// # Examples
///
/// Simple usage:
///
/// ```
- /// #![feature(string_leak)]
- ///
/// let x = String::from("bucket");
/// let static_ref: &'static mut str = x.leak();
/// assert_eq!(static_ref, "bucket");
/// ```
- #[unstable(feature = "string_leak", issue = "102929")]
+ #[stable(feature = "string_leak", since = "1.72.0")]
#[inline]
pub fn leak<'a>(self) -> &'a mut str {
let slice = self.vec.leak();
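
A small stable-Rust illustration of the guidance added to the docs: shrink via `into_boxed_str` before leaking if the spare capacity matters:

```rust
fn main() {
    let mut s = String::with_capacity(1024);
    s.push_str("hi");

    // `s.leak()` would keep the entire 1024-byte buffer alive forever.
    // Shrinking first leaks only the two bytes actually in use:
    let tight: &'static mut str = Box::leak(s.into_boxed_str());
    assert_eq!(tight, "hi");
}
```
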
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index bfdb7a92b..5bb1a93ae 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -33,7 +33,7 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
#[cfg(not(no_global_oom_handling))]
-use crate::alloc::{box_free, WriteCloneIntoRaw};
+use crate::alloc::WriteCloneIntoRaw;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
@@ -1267,7 +1267,7 @@ impl<T: ?Sized> Arc<T> {
}
/// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to
- /// [`ptr::eq`]. See [that function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
+ /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
///
/// # Examples
///
@@ -1287,7 +1287,7 @@ impl<T: ?Sized> Arc<T> {
#[must_use]
#[stable(feature = "ptr_eq", since = "1.17.0")]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
- this.ptr.as_ptr() == other.ptr.as_ptr()
+ this.ptr.as_ptr() as *const () == other.ptr.as_ptr() as *const ()
}
}
@@ -1360,23 +1360,21 @@ impl<T: ?Sized> Arc<T> {
}
#[cfg(not(no_global_oom_handling))]
- fn from_box(v: Box<T>) -> Arc<T> {
+ fn from_box(src: Box<T>) -> Arc<T> {
unsafe {
- let (box_unique, alloc) = Box::into_unique(v);
- let bptr = box_unique.as_ptr();
-
- let value_size = size_of_val(&*bptr);
- let ptr = Self::allocate_for_ptr(bptr);
+ let value_size = size_of_val(&*src);
+ let ptr = Self::allocate_for_ptr(&*src);
// Copy value as bytes
ptr::copy_nonoverlapping(
- bptr as *const T as *const u8,
+ &*src as *const T as *const u8,
&mut (*ptr).data as *mut _ as *mut u8,
value_size,
);
// Free the allocation without dropping its contents
- box_free(box_unique, alloc);
+ let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>);
+ drop(src);
Self::from_ptr(ptr)
}
@@ -2256,8 +2254,8 @@ impl<T: ?Sized> Weak<T> {
}
/// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if
- /// both don't point to any allocation (because they were created with `Weak::new()`). See [that
- /// function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers.
+ /// both don't point to any allocation (because they were created with `Weak::new()`). However,
+ /// this function ignores the metadata of `dyn Trait` pointers.
///
/// # Notes
///
@@ -2300,7 +2298,7 @@ impl<T: ?Sized> Weak<T> {
#[must_use]
#[stable(feature = "weak_ptr_eq", since = "1.39.0")]
pub fn ptr_eq(&self, other: &Self) -> bool {
- self.ptr.as_ptr() == other.ptr.as_ptr()
+ ptr::eq(self.ptr.as_ptr() as *const (), other.ptr.as_ptr() as *const ())
}
}
diff --git a/library/alloc/src/vec/drain_filter.rs b/library/alloc/src/vec/drain_filter.rs
deleted file mode 100644
index 21b090234..000000000
--- a/library/alloc/src/vec/drain_filter.rs
+++ /dev/null
@@ -1,197 +0,0 @@
-use crate::alloc::{Allocator, Global};
-use core::mem::{ManuallyDrop, SizedTypeProperties};
-use core::ptr;
-use core::slice;
-
-use super::Vec;
-
-/// An iterator which uses a closure to determine if an element should be removed.
-///
-/// This struct is created by [`Vec::drain_filter`].
-/// See its documentation for more.
-///
-/// # Example
-///
-/// ```
-/// #![feature(drain_filter)]
-///
-/// let mut v = vec![0, 1, 2];
-/// let iter: std::vec::DrainFilter<'_, _, _> = v.drain_filter(|x| *x % 2 == 0);
-/// ```
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-#[derive(Debug)]
-pub struct DrainFilter<
- 'a,
- T,
- F,
- #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
-> where
- F: FnMut(&mut T) -> bool,
-{
- pub(super) vec: &'a mut Vec<T, A>,
- /// The index of the item that will be inspected by the next call to `next`.
- pub(super) idx: usize,
- /// The number of items that have been drained (removed) thus far.
- pub(super) del: usize,
- /// The original length of `vec` prior to draining.
- pub(super) old_len: usize,
- /// The filter test predicate.
- pub(super) pred: F,
- /// A flag that indicates a panic has occurred in the filter test predicate.
- /// This is used as a hint in the drop implementation to prevent consumption
- /// of the remainder of the `DrainFilter`. Any unprocessed items will be
- /// backshifted in the `vec`, but no further items will be dropped or
- /// tested by the filter predicate.
- pub(super) panic_flag: bool,
-}
-
-impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- /// Returns a reference to the underlying allocator.
- #[unstable(feature = "allocator_api", issue = "32838")]
- #[inline]
- pub fn allocator(&self) -> &A {
- self.vec.allocator()
- }
-
- /// Keep unyielded elements in the source `Vec`.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(drain_filter)]
- /// #![feature(drain_keep_rest)]
- ///
- /// let mut vec = vec!['a', 'b', 'c'];
- /// let mut drain = vec.drain_filter(|_| true);
- ///
- /// assert_eq!(drain.next().unwrap(), 'a');
- ///
- /// // This call keeps 'b' and 'c' in the vec.
- /// drain.keep_rest();
- ///
- /// // If we wouldn't call `keep_rest()`,
- /// // `vec` would be empty.
- /// assert_eq!(vec, ['b', 'c']);
- /// ```
- #[unstable(feature = "drain_keep_rest", issue = "101122")]
- pub fn keep_rest(self) {
- // At this moment layout looks like this:
- //
- // _____________________/-- old_len
- // / \
- // [kept] [yielded] [tail]
- // \_______/ ^-- idx
- // \-- del
- //
- // Normally `Drop` impl would drop [tail] (via .for_each(drop), ie still calling `pred`)
- //
- // 1. Move [tail] after [kept]
- // 2. Update length of the original vec to `old_len - del`
- // a. In case of ZST, this is the only thing we want to do
- // 3. Do *not* drop self, as everything is put in a consistent state already, there is nothing to do
- let mut this = ManuallyDrop::new(self);
-
- unsafe {
- // ZSTs have no identity, so we don't need to move them around.
- if !T::IS_ZST && this.idx < this.old_len && this.del > 0 {
- let ptr = this.vec.as_mut_ptr();
- let src = ptr.add(this.idx);
- let dst = src.sub(this.del);
- let tail_len = this.old_len - this.idx;
- src.copy_to(dst, tail_len);
- }
-
- let new_len = this.old_len - this.del;
- this.vec.set_len(new_len);
- }
- }
-}
-
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- type Item = T;
-
- fn next(&mut self) -> Option<T> {
- unsafe {
- while self.idx < self.old_len {
- let i = self.idx;
- let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
- self.panic_flag = true;
- let drained = (self.pred)(&mut v[i]);
- self.panic_flag = false;
- // Update the index *after* the predicate is called. If the index
- // is updated prior and the predicate panics, the element at this
- // index would be leaked.
- self.idx += 1;
- if drained {
- self.del += 1;
- return Some(ptr::read(&v[i]));
- } else if self.del > 0 {
- let del = self.del;
- let src: *const T = &v[i];
- let dst: *mut T = &mut v[i - del];
- ptr::copy_nonoverlapping(src, dst, 1);
- }
- }
- None
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- (0, Some(self.old_len - self.idx))
- }
-}
-
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
-where
- F: FnMut(&mut T) -> bool,
-{
- fn drop(&mut self) {
- struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
- where
- F: FnMut(&mut T) -> bool,
- {
- drain: &'b mut DrainFilter<'a, T, F, A>,
- }
-
- impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
- where
- F: FnMut(&mut T) -> bool,
- {
- fn drop(&mut self) {
- unsafe {
- if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
- // This is a pretty messed up state, and there isn't really an
- // obviously right thing to do. We don't want to keep trying
- // to execute `pred`, so we just backshift all the unprocessed
- // elements and tell the vec that they still exist. The backshift
- // is required to prevent a double-drop of the last successfully
- // drained item prior to a panic in the predicate.
- let ptr = self.drain.vec.as_mut_ptr();
- let src = ptr.add(self.drain.idx);
- let dst = src.sub(self.drain.del);
- let tail_len = self.drain.old_len - self.drain.idx;
- src.copy_to(dst, tail_len);
- }
- self.drain.vec.set_len(self.drain.old_len - self.drain.del);
- }
- }
- }
-
- let backshift = BackshiftOnDrop { drain: self };
-
- // Attempt to consume any remaining elements if the filter predicate
- // has not yet panicked. We'll backshift any remaining elements
- // whether we've already panicked or if the consumption here panics.
- if !backshift.drain.panic_flag {
- backshift.drain.for_each(drop);
- }
- }
-}
diff --git a/library/alloc/src/vec/extract_if.rs b/library/alloc/src/vec/extract_if.rs
new file mode 100644
index 000000000..118cfdb36
--- /dev/null
+++ b/library/alloc/src/vec/extract_if.rs
@@ -0,0 +1,113 @@
+use crate::alloc::{Allocator, Global};
+use core::ptr;
+use core::slice;
+
+use super::Vec;
+
+/// An iterator which uses a closure to determine if an element should be removed.
+///
+/// This struct is created by [`Vec::extract_if`].
+/// See its documentation for more.
+///
+/// # Example
+///
+/// ```
+/// #![feature(extract_if)]
+///
+/// let mut v = vec![0, 1, 2];
+/// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(|x| *x % 2 == 0);
+/// ```
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+#[derive(Debug)]
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+pub struct ExtractIf<
+ 'a,
+ T,
+ F,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> where
+ F: FnMut(&mut T) -> bool,
+{
+ pub(super) vec: &'a mut Vec<T, A>,
+ /// The index of the item that will be inspected by the next call to `next`.
+ pub(super) idx: usize,
+ /// The number of items that have been drained (removed) thus far.
+ pub(super) del: usize,
+ /// The original length of `vec` prior to draining.
+ pub(super) old_len: usize,
+ /// The filter test predicate.
+ pub(super) pred: F,
+}
+
+impl<T, F, A: Allocator> ExtractIf<'_, T, F, A>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ /// Returns a reference to the underlying allocator.
+ #[unstable(feature = "allocator_api", issue = "32838")]
+ #[inline]
+ pub fn allocator(&self) -> &A {
+ self.vec.allocator()
+ }
+}
+
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ unsafe {
+ while self.idx < self.old_len {
+ let i = self.idx;
+ let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
+ let drained = (self.pred)(&mut v[i]);
+ // Update the index *after* the predicate is called. If the index
+ // is updated prior and the predicate panics, the element at this
+ // index would be leaked.
+ self.idx += 1;
+ if drained {
+ self.del += 1;
+ return Some(ptr::read(&v[i]));
+ } else if self.del > 0 {
+ let del = self.del;
+ let src: *const T = &v[i];
+ let dst: *mut T = &mut v[i - del];
+ ptr::copy_nonoverlapping(src, dst, 1);
+ }
+ }
+ None
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, Some(self.old_len - self.idx))
+ }
+}
+
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ fn drop(&mut self) {
+ unsafe {
+ if self.idx < self.old_len && self.del > 0 {
+ // This is a pretty messed up state, and there isn't really an
+ // obviously right thing to do. We don't want to keep trying
+ // to execute `pred`, so we just backshift all the unprocessed
+ // elements and tell the vec that they still exist. The backshift
+ // is required to prevent a double-drop of the last successfully
+ // drained item prior to a panic in the predicate.
+ let ptr = self.vec.as_mut_ptr();
+ let src = ptr.add(self.idx);
+ let dst = src.sub(self.del);
+ let tail_len = self.old_len - self.idx;
+ src.copy_to(dst, tail_len);
+ }
+ self.vec.set_len(self.old_len - self.del);
+ }
+ }
+}
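
The key behavioral difference from the deleted `DrainFilter` lives in this `Drop` impl: it only backshifts and fixes up the length; it never keeps running the predicate. A nightly sketch of what that means for callers:

```rust
#![feature(extract_if)]

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Short-circuit after one extracted element, then drop the iterator.
    let first_even: Vec<i32> = v.extract_if(|x| *x % 2 == 0).take(1).collect();
    assert_eq!(first_even, [2]);
    // The old DrainFilter would have kept filtering on drop, leaving [1, 3, 5];
    // ExtractIf retains every element it never visited.
    assert_eq!(v, [1, 3, 4, 5]);
}
```
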
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 47661a3d3..598ecf05e 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -71,10 +71,10 @@ use crate::boxed::Box;
use crate::collections::TryReserveError;
use crate::raw_vec::RawVec;
-#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
-pub use self::drain_filter::DrainFilter;
+#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+pub use self::extract_if::ExtractIf;
-mod drain_filter;
+mod extract_if;
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_splice", since = "1.21.0")]
@@ -560,22 +560,20 @@ impl<T> Vec<T> {
/// Using memory that was allocated elsewhere:
///
/// ```rust
- /// #![feature(allocator_api)]
- ///
- /// use std::alloc::{AllocError, Allocator, Global, Layout};
+ /// use std::alloc::{alloc, Layout};
///
/// fn main() {
/// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
///
/// let vec = unsafe {
- /// let mem = match Global.allocate(layout) {
- /// Ok(mem) => mem.cast::<u32>().as_ptr(),
- /// Err(AllocError) => return,
- /// };
+ /// let mem = alloc(layout).cast::<u32>();
+ /// if mem.is_null() {
+ /// return;
+ /// }
///
/// mem.write(1_000_000);
///
- /// Vec::from_raw_parts_in(mem, 1, 16, Global)
+ /// Vec::from_raw_parts(mem, 1, 16)
/// };
///
/// assert_eq!(vec, &[1_000_000]);
@@ -758,19 +756,22 @@ impl<T, A: Allocator> Vec<T, A> {
/// Using memory that was allocated elsewhere:
///
/// ```rust
- /// use std::alloc::{alloc, Layout};
+ /// #![feature(allocator_api)]
+ ///
+ /// use std::alloc::{AllocError, Allocator, Global, Layout};
///
/// fn main() {
/// let layout = Layout::array::<u32>(16).expect("overflow cannot happen");
+ ///
/// let vec = unsafe {
- /// let mem = alloc(layout).cast::<u32>();
- /// if mem.is_null() {
- /// return;
- /// }
+ /// let mem = match Global.allocate(layout) {
+ /// Ok(mem) => mem.cast::<u32>().as_ptr(),
+ /// Err(AllocError) => return,
+ /// };
///
/// mem.write(1_000_000);
///
- /// Vec::from_raw_parts(mem, 1, 16)
+ /// Vec::from_raw_parts_in(mem, 1, 16, Global)
/// };
///
/// assert_eq!(vec, &[1_000_000]);
@@ -2355,7 +2356,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
let len = self.len();
if new_len > len {
- self.extend_with(new_len - len, ExtendElement(value))
+ self.extend_with(new_len - len, value)
} else {
self.truncate(new_len);
}
@@ -2469,26 +2470,10 @@ impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
}
}
-// This code generalizes `extend_with_{element,default}`.
-trait ExtendWith<T> {
- fn next(&mut self) -> T;
- fn last(self) -> T;
-}
-
-struct ExtendElement<T>(T);
-impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
- fn next(&mut self) -> T {
- self.0.clone()
- }
- fn last(self) -> T {
- self.0
- }
-}
-
-impl<T, A: Allocator> Vec<T, A> {
+impl<T: Clone, A: Allocator> Vec<T, A> {
#[cfg(not(no_global_oom_handling))]
- /// Extend the vector by `n` values, using the given generator.
- fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
+ /// Extend the vector by `n` clones of `value`.
+ fn extend_with(&mut self, n: usize, value: T) {
self.reserve(n);
unsafe {
@@ -2500,15 +2485,15 @@ impl<T, A: Allocator> Vec<T, A> {
// Write all elements except the last one
for _ in 1..n {
- ptr::write(ptr, value.next());
+ ptr::write(ptr, value.clone());
ptr = ptr.add(1);
- // Increment the length in every step in case next() panics
+ // Increment the length in every step in case clone() panics
local_len.increment_len(1);
}
if n > 0 {
// We can write the last element directly without cloning needlessly
- ptr::write(ptr, value.last());
+ ptr::write(ptr, value);
local_len.increment_len(1);
}
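
The replacement code keeps the old optimization in simpler form: clone `value` into all but the last slot, then move `value` itself into the final one. A safe, simplified rendition of the same pattern (using `push` in place of the raw-pointer loop and its panic guard):

```rust
fn repeat_push<T: Clone>(out: &mut Vec<T>, n: usize, value: T) {
    if n == 0 {
        return; // nothing to write; `value` is simply dropped
    }
    out.reserve(n);
    for _ in 1..n {
        out.push(value.clone());
    }
    out.push(value); // the last element is moved, not cloned
}

fn main() {
    let mut v = vec![0];
    repeat_push(&mut v, 3, 7);
    assert_eq!(v, [0, 7, 7, 7]);
}
```
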
@@ -2908,6 +2893,12 @@ impl<T, A: Allocator> Vec<T, A> {
/// If the closure returns false, the element will remain in the vector and will not be yielded
/// by the iterator.
///
+ /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+ /// or the iteration short-circuits, then the remaining elements will be retained.
+ /// Use [`retain`] with a negated predicate if you do not need the returned iterator.
+ ///
+ /// [`retain`]: Vec::retain
+ ///
/// Using this method is equivalent to the following code:
///
/// ```
@@ -2926,10 +2917,10 @@ impl<T, A: Allocator> Vec<T, A> {
/// # assert_eq!(vec, vec![1, 4, 5]);
/// ```
///
- /// But `drain_filter` is easier to use. `drain_filter` is also more efficient,
+ /// But `extract_if` is easier to use. `extract_if` is also more efficient,
/// because it can backshift the elements of the array in bulk.
///
- /// Note that `drain_filter` also lets you mutate every element in the filter closure,
+ /// Note that `extract_if` also lets you mutate every element in the filter closure,
/// regardless of whether you choose to keep or remove it.
///
/// # Examples
@@ -2937,17 +2928,17 @@ impl<T, A: Allocator> Vec<T, A> {
/// Splitting an array into evens and odds, reusing the original allocation:
///
/// ```
- /// #![feature(drain_filter)]
+ /// #![feature(extract_if)]
/// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
///
- /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ /// let evens = numbers.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
/// let odds = numbers;
///
/// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
/// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
/// ```
- #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
- pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F, A>
+ #[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
+ pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
@@ -2958,7 +2949,7 @@ impl<T, A: Allocator> Vec<T, A> {
self.set_len(0);
}
- DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
+ ExtractIf { vec: self, idx: 0, del: 0, old_len, pred: filter }
}
}
@@ -2988,9 +2979,14 @@ impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> {
/// Implements comparison of vectors, [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialOrd, A: Allocator> PartialOrd for Vec<T, A> {
+impl<T, A1, A2> PartialOrd<Vec<T, A2>> for Vec<T, A1>
+where
+ T: PartialOrd,
+ A1: Allocator,
+ A2: Allocator,
+{
#[inline]
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ fn partial_cmp(&self, other: &Vec<T, A2>) -> Option<Ordering> {
PartialOrd::partial_cmp(&**self, &**other)
}
}
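
The loosened impl lets two vectors be compared even when their allocator parameters differ; the element type and ordering semantics are unchanged. A sketch, assuming a nightly toolchain where `System` implements `Allocator` under `allocator_api`:

```rust
#![feature(allocator_api)]
use std::alloc::System;

fn main() {
    let a = vec![1, 2, 3]; // Vec<i32, Global>
    let mut b = Vec::new_in(System); // Vec<i32, System>
    b.extend([1, 2, 4]);
    // Lexicographic comparison across different allocator parameters.
    assert!(a < b);
}
```
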
diff --git a/library/alloc/src/vec/spec_from_elem.rs b/library/alloc/src/vec/spec_from_elem.rs
index ff364c033..da43d17bf 100644
--- a/library/alloc/src/vec/spec_from_elem.rs
+++ b/library/alloc/src/vec/spec_from_elem.rs
@@ -3,7 +3,7 @@ use core::ptr;
use crate::alloc::Allocator;
use crate::raw_vec::RawVec;
-use super::{ExtendElement, IsZero, Vec};
+use super::{IsZero, Vec};
// Specialization trait used for Vec::from_elem
pub(super) trait SpecFromElem: Sized {
@@ -13,7 +13,7 @@ pub(super) trait SpecFromElem: Sized {
impl<T: Clone> SpecFromElem for T {
default fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
let mut v = Vec::with_capacity_in(n, alloc);
- v.extend_with(n, ExtendElement(elem));
+ v.extend_with(n, elem);
v
}
}
@@ -25,7 +25,7 @@ impl<T: Clone + IsZero> SpecFromElem for T {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
let mut v = Vec::with_capacity_in(n, alloc);
- v.extend_with(n, ExtendElement(elem));
+ v.extend_with(n, elem);
v
}
}