path: root/library/alloc/src/collections/btree
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit     698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree       173a775858bd501c378080a10dca74132f05bc50  /library/alloc/src/collections/btree
parent     Initial commit. (diff)
Adding upstream version 1.64.0+dfsg1. (tag: upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'library/alloc/src/collections/btree')
-rw-r--r--  library/alloc/src/collections/btree/append.rs              107
-rw-r--r--  library/alloc/src/collections/btree/borrow.rs               47
-rw-r--r--  library/alloc/src/collections/btree/borrow/tests.rs         19
-rw-r--r--  library/alloc/src/collections/btree/dedup_sorted_iter.rs    47
-rw-r--r--  library/alloc/src/collections/btree/fix.rs                 179
-rw-r--r--  library/alloc/src/collections/btree/map.rs                2423
-rw-r--r--  library/alloc/src/collections/btree/map/entry.rs           555
-rw-r--r--  library/alloc/src/collections/btree/map/tests.rs          2338
-rw-r--r--  library/alloc/src/collections/btree/mem.rs                  35
-rw-r--r--  library/alloc/src/collections/btree/merge_iter.rs           98
-rw-r--r--  library/alloc/src/collections/btree/mod.rs                  26
-rw-r--r--  library/alloc/src/collections/btree/navigate.rs            719
-rw-r--r--  library/alloc/src/collections/btree/node.rs               1753
-rw-r--r--  library/alloc/src/collections/btree/node/tests.rs          102
-rw-r--r--  library/alloc/src/collections/btree/remove.rs               95
-rw-r--r--  library/alloc/src/collections/btree/search.rs              285
-rw-r--r--  library/alloc/src/collections/btree/set.rs                1789
-rw-r--r--  library/alloc/src/collections/btree/set/tests.rs           856
-rw-r--r--  library/alloc/src/collections/btree/set_val.rs              29
-rw-r--r--  library/alloc/src/collections/btree/split.rs                73
-rw-r--r--  library/alloc/src/collections/btree/testing/crash_test.rs  119
-rw-r--r--  library/alloc/src/collections/btree/testing/mod.rs           3
-rw-r--r--  library/alloc/src/collections/btree/testing/ord_chaos.rs    81
-rw-r--r--  library/alloc/src/collections/btree/testing/rng.rs          28
24 files changed, 11806 insertions, 0 deletions
diff --git a/library/alloc/src/collections/btree/append.rs b/library/alloc/src/collections/btree/append.rs
new file mode 100644
index 000000000..b6989afb6
--- /dev/null
+++ b/library/alloc/src/collections/btree/append.rs
@@ -0,0 +1,107 @@
+use super::merge_iter::MergeIterInner;
+use super::node::{self, Root};
+use core::alloc::Allocator;
+use core::iter::FusedIterator;
+
+impl<K, V> Root<K, V> {
+ /// Appends all key-value pairs from the union of two ascending iterators,
+ /// incrementing a `length` variable along the way. The latter makes it
+ /// easier for the caller to avoid a leak when a drop handler panics.
+ ///
+ /// If both iterators produce the same key, this method drops the pair from
+ /// the left iterator and appends the pair from the right iterator.
+ ///
+ /// If you want the tree to end up in a strictly ascending order, like for
+ /// a `BTreeMap`, both iterators should produce keys in strictly ascending
+ /// order, each greater than all keys in the tree, including any keys
+ /// already in the tree upon entry.
+ pub fn append_from_sorted_iters<I, A: Allocator + Clone>(
+ &mut self,
+ left: I,
+ right: I,
+ length: &mut usize,
+ alloc: A,
+ ) where
+ K: Ord,
+ I: Iterator<Item = (K, V)> + FusedIterator,
+ {
+ // We prepare to merge `left` and `right` into a sorted sequence in linear time.
+ let iter = MergeIter(MergeIterInner::new(left, right));
+
+ // Meanwhile, we build a tree from the sorted sequence in linear time.
+ self.bulk_push(iter, length, alloc)
+ }
+
+ /// Pushes all key-value pairs to the end of the tree, incrementing a
+ /// `length` variable along the way. The latter makes it easier for the
+ /// caller to avoid a leak when the iterator panics.
+ pub fn bulk_push<I, A: Allocator + Clone>(&mut self, iter: I, length: &mut usize, alloc: A)
+ where
+ I: Iterator<Item = (K, V)>,
+ {
+ let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
+ // Iterate through all key-value pairs, pushing them into nodes at the right level.
+ for (key, value) in iter {
+ // Try to push key-value pair into the current leaf node.
+ if cur_node.len() < node::CAPACITY {
+ cur_node.push(key, value);
+ } else {
+ // No space left, go up and push there.
+ let mut open_node;
+ let mut test_node = cur_node.forget_type();
+ loop {
+ match test_node.ascend() {
+ Ok(parent) => {
+ let parent = parent.into_node();
+ if parent.len() < node::CAPACITY {
+ // Found a node with space left, push here.
+ open_node = parent;
+ break;
+ } else {
+ // Go up again.
+ test_node = parent.forget_type();
+ }
+ }
+ Err(_) => {
+ // We are at the top, create a new root node and push there.
+ open_node = self.push_internal_level(alloc.clone());
+ break;
+ }
+ }
+ }
+
+ // Push key-value pair and new right subtree.
+ let tree_height = open_node.height() - 1;
+ let mut right_tree = Root::new(alloc.clone());
+ for _ in 0..tree_height {
+ right_tree.push_internal_level(alloc.clone());
+ }
+ open_node.push(key, value, right_tree);
+
+ // Go down to the right-most leaf again.
+ cur_node = open_node.forget_type().last_leaf_edge().into_node();
+ }
+
+ // Increment length every iteration, to make sure the map drops
+ // the appended elements even if advancing the iterator panics.
+ *length += 1;
+ }
+ self.fix_right_border_of_plentiful();
+ }
+}
+
+// An iterator for merging two sorted sequences into one
+struct MergeIter<K, V, I: Iterator<Item = (K, V)>>(MergeIterInner<I>);
+
+impl<K: Ord, V, I> Iterator for MergeIter<K, V, I>
+where
+ I: Iterator<Item = (K, V)> + FusedIterator,
+{
+ type Item = (K, V);
+
+ /// If two keys are equal, returns the key-value pair from the right source.
+ fn next(&mut self) -> Option<(K, V)> {
+ let (a_next, b_next) = self.0.nexts(|a: &(K, V), b: &(K, V)| K::cmp(&a.0, &b.0));
+ b_next.or(a_next)
+ }
+}
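
The merge rule above (on equal keys, the pair from the right iterator wins) is observable through the stable `BTreeMap::append`, which is built on `append_from_sorted_iters`. A minimal check, using only public API:

```
use std::collections::BTreeMap;

let mut left = BTreeMap::from([(1, "left"), (2, "left")]);
let mut right = BTreeMap::from([(2, "right"), (3, "right")]);
left.append(&mut right);

// On the duplicate key 2, the pair from `right` replaced the one from `left`.
assert_eq!(left[&2], "right");
assert_eq!(left.len(), 3);
assert!(right.is_empty());
```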
diff --git a/library/alloc/src/collections/btree/borrow.rs b/library/alloc/src/collections/btree/borrow.rs
new file mode 100644
index 000000000..016f139a5
--- /dev/null
+++ b/library/alloc/src/collections/btree/borrow.rs
@@ -0,0 +1,47 @@
+use core::marker::PhantomData;
+use core::ptr::NonNull;
+
+/// Models a reborrow of some unique reference, when you know that the reborrow
+/// and all its descendants (i.e., all pointers and references derived from it)
+/// will not be used any more at some point, after which you want to use the
+/// original unique reference again.
+///
+/// The borrow checker usually handles this stacking of borrows for you, but
+/// some control flows that accomplish this stacking are too complicated for
+/// the compiler to follow. A `DormantMutRef` allows you to check borrowing
+/// yourself, while still expressing its stacked nature, and encapsulating
+/// the raw pointer code needed to do this without undefined behavior.
+pub struct DormantMutRef<'a, T> {
+ ptr: NonNull<T>,
+ _marker: PhantomData<&'a mut T>,
+}
+
+unsafe impl<'a, T> Sync for DormantMutRef<'a, T> where &'a mut T: Sync {}
+unsafe impl<'a, T> Send for DormantMutRef<'a, T> where &'a mut T: Send {}
+
+impl<'a, T> DormantMutRef<'a, T> {
+ /// Capture a unique borrow, and immediately reborrow it. For the compiler,
+ /// the lifetime of the new reference is the same as the lifetime of the
+ /// original reference, but you promise to use it for a shorter period.
+ pub fn new(t: &'a mut T) -> (&'a mut T, Self) {
+ let ptr = NonNull::from(t);
+ // SAFETY: we hold the borrow throughout 'a via `_marker`, and we expose
+ // only this reference, so it is unique.
+ let new_ref = unsafe { &mut *ptr.as_ptr() };
+ (new_ref, Self { ptr, _marker: PhantomData })
+ }
+
+ /// Revert to the unique borrow initially captured.
+ ///
+ /// # Safety
+ ///
+ /// The reborrow must have ended, i.e., the reference returned by `new` and
+ /// all pointers and references derived from it, must not be used anymore.
+ pub unsafe fn awaken(self) -> &'a mut T {
+ // SAFETY: our own safety conditions imply this reference is again unique.
+ unsafe { &mut *self.ptr.as_ptr() }
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/library/alloc/src/collections/btree/borrow/tests.rs b/library/alloc/src/collections/btree/borrow/tests.rs
new file mode 100644
index 000000000..56a8434fc
--- /dev/null
+++ b/library/alloc/src/collections/btree/borrow/tests.rs
@@ -0,0 +1,19 @@
+use super::DormantMutRef;
+
+#[test]
+fn test_borrow() {
+ let mut data = 1;
+ let mut stack = vec![];
+ let mut rr = &mut data;
+ for factor in [2, 3, 7].iter() {
+ let (r, dormant_r) = DormantMutRef::new(rr);
+ rr = r;
+ assert_eq!(*rr, 1);
+ stack.push((factor, dormant_r));
+ }
+ while let Some((factor, dormant_r)) = stack.pop() {
+ let r = unsafe { dormant_r.awaken() };
+ *r *= factor;
+ }
+ assert_eq!(data, 42);
+}
diff --git a/library/alloc/src/collections/btree/dedup_sorted_iter.rs b/library/alloc/src/collections/btree/dedup_sorted_iter.rs
new file mode 100644
index 000000000..60bf83b83
--- /dev/null
+++ b/library/alloc/src/collections/btree/dedup_sorted_iter.rs
@@ -0,0 +1,47 @@
+use core::iter::Peekable;
+
+/// An iterator for deduping the keys of a sorted iterator.
+/// When encountering duplicated keys, only the last key-value pair is yielded.
+///
+/// Used by [`BTreeMap::bulk_build_from_sorted_iter`].
+pub struct DedupSortedIter<K, V, I>
+where
+ I: Iterator<Item = (K, V)>,
+{
+ iter: Peekable<I>,
+}
+
+impl<K, V, I> DedupSortedIter<K, V, I>
+where
+ I: Iterator<Item = (K, V)>,
+{
+ pub fn new(iter: I) -> Self {
+ Self { iter: iter.peekable() }
+ }
+}
+
+impl<K, V, I> Iterator for DedupSortedIter<K, V, I>
+where
+ K: Eq,
+ I: Iterator<Item = (K, V)>,
+{
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ loop {
+ let next = match self.iter.next() {
+ Some(next) => next,
+ None => return None,
+ };
+
+ let peeked = match self.iter.peek() {
+ Some(peeked) => peeked,
+ None => return Some(next),
+ };
+
+ if next.0 != peeked.0 {
+ return Some(next);
+ }
+ }
+ }
+}
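
This last-pair-wins rule keeps bulk construction consistent with repeated `insert`. A small sketch of the observable behavior through `BTreeMap::from`, which feeds a sorted, deduplicated sequence into the bulk builder:

```
use std::collections::BTreeMap;

// Duplicate keys in the input: for each key, the last pair is kept,
// matching what repeated `insert` calls would produce.
let map = BTreeMap::from([(1, "a"), (1, "b"), (2, "c")]);
assert_eq!(map[&1], "b");
assert_eq!(map.len(), 2);
```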
diff --git a/library/alloc/src/collections/btree/fix.rs b/library/alloc/src/collections/btree/fix.rs
new file mode 100644
index 000000000..91b612180
--- /dev/null
+++ b/library/alloc/src/collections/btree/fix.rs
@@ -0,0 +1,179 @@
+use super::map::MIN_LEN;
+use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef, Root};
+use core::alloc::Allocator;
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Stocks up a possibly underfull node by merging with or stealing from a
+ /// sibling. If successful but at the cost of shrinking the parent node,
+ /// returns that shrunk parent node. Returns an `Err` if the node is
+ /// an empty root.
+ fn fix_node_through_parent<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
+ let len = self.len();
+ if len >= MIN_LEN {
+ Ok(None)
+ } else {
+ match self.choose_parent_kv() {
+ Ok(Left(mut left_parent_kv)) => {
+ if left_parent_kv.can_merge() {
+ let parent = left_parent_kv.merge_tracking_parent(alloc);
+ Ok(Some(parent))
+ } else {
+ left_parent_kv.bulk_steal_left(MIN_LEN - len);
+ Ok(None)
+ }
+ }
+ Ok(Right(mut right_parent_kv)) => {
+ if right_parent_kv.can_merge() {
+ let parent = right_parent_kv.merge_tracking_parent(alloc);
+ Ok(Some(parent))
+ } else {
+ right_parent_kv.bulk_steal_right(MIN_LEN - len);
+ Ok(None)
+ }
+ }
+ Err(root) => {
+ if len > 0 {
+ Ok(None)
+ } else {
+ Err(root)
+ }
+ }
+ }
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Stocks up a possibly underfull node, and if that causes its parent node
+ /// to shrink, stocks up the parent, recursively.
+ /// Returns `true` if it fixed the tree, `false` if it couldn't because the
+ /// root node became empty.
+ ///
+ /// This method does not expect ancestors to already be underfull upon entry
+ /// and panics if it encounters an empty ancestor.
+ pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
+ loop {
+ match self.fix_node_through_parent(alloc.clone()) {
+ Ok(Some(parent)) => self = parent.forget_type(),
+ Ok(None) => return true,
+ Err(_) => return false,
+ }
+ }
+ }
+}
+
+impl<K, V> Root<K, V> {
+ /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
+ pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
+ while self.height() > 0 && self.len() == 0 {
+ self.pop_internal_level(alloc.clone());
+ }
+ }
+
+ /// Stocks up or merges away any underfull nodes on the right border of the
+ /// tree. The other nodes, those that are neither the root nor a rightmost edge,
+ /// must already have at least MIN_LEN elements.
+ pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
+ self.fix_top(alloc.clone());
+ if self.len() > 0 {
+ self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
+ self.fix_top(alloc);
+ }
+ }
+
+ /// The symmetric clone of `fix_right_border`.
+ pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
+ self.fix_top(alloc.clone());
+ if self.len() > 0 {
+ self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
+ self.fix_top(alloc);
+ }
+ }
+
+ /// Stocks up any underfull nodes on the right border of the tree.
+ /// The other nodes, those that are neither the root nor a rightmost edge,
+ /// must be prepared to have up to MIN_LEN elements stolen.
+ pub fn fix_right_border_of_plentiful(&mut self) {
+ let mut cur_node = self.borrow_mut();
+ while let Internal(internal) = cur_node.force() {
+ // Check if right-most child is underfull.
+ let mut last_kv = internal.last_kv().consider_for_balancing();
+ debug_assert!(last_kv.left_child_len() >= MIN_LEN * 2);
+ let right_child_len = last_kv.right_child_len();
+ if right_child_len < MIN_LEN {
+ // We need to steal.
+ last_kv.bulk_steal_left(MIN_LEN - right_child_len);
+ }
+
+ // Go further down.
+ cur_node = last_kv.into_right_child();
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
+ fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
+ while let Internal(internal_kv) = self.force() {
+ self = internal_kv.fix_left_child(alloc.clone()).first_kv();
+ debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
+ }
+ }
+
+ fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
+ while let Internal(internal_kv) = self.force() {
+ self = internal_kv.fix_right_child(alloc.clone()).last_kv();
+ debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ /// Stocks up the left child, assuming the right child isn't underfull, and
+ /// provisions an extra element to allow merging its children in turn
+ /// without becoming underfull.
+ /// Returns the left child.
+ fn fix_left_child<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ let mut internal_kv = self.consider_for_balancing();
+ let left_len = internal_kv.left_child_len();
+ debug_assert!(internal_kv.right_child_len() >= MIN_LEN);
+ if internal_kv.can_merge() {
+ internal_kv.merge_tracking_child(alloc)
+ } else {
+ // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+ let count = (MIN_LEN + 1).saturating_sub(left_len);
+ if count > 0 {
+ internal_kv.bulk_steal_right(count);
+ }
+ internal_kv.into_left_child()
+ }
+ }
+
+ /// Stocks up the right child, assuming the left child isn't underfull, and
+ /// provisions an extra element to allow merging its children in turn
+ /// without becoming underfull.
+ /// Returns wherever the right child ended up.
+ fn fix_right_child<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ let mut internal_kv = self.consider_for_balancing();
+ let right_len = internal_kv.right_child_len();
+ debug_assert!(internal_kv.left_child_len() >= MIN_LEN);
+ if internal_kv.can_merge() {
+ internal_kv.merge_tracking_child(alloc)
+ } else {
+ // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+ let count = (MIN_LEN + 1).saturating_sub(right_len);
+ if count > 0 {
+ internal_kv.bulk_steal_left(count);
+ }
+ internal_kv.into_right_child()
+ }
+ }
+}
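
To make the `MIN_LEN + 1` provision in `fix_left_child`/`fix_right_child` concrete, here is a worked check of the steal count (assuming the crate's usual constants, B = 6 and `MIN_LEN` = 5; this is illustrative and not part of the source):

```
// Hypothetical standalone check of the steal count used above.
const MIN_LEN: usize = 5; // assumed value of node::MIN_LEN_AFTER_SPLIT

let left_len = 3; // an underfull child
// Stock up to MIN_LEN + 1: if a merge on the next level later pulls one
// element down out of this child, it still ends at MIN_LEN, not below it.
let count = (MIN_LEN + 1).saturating_sub(left_len);
assert_eq!(count, 3); // child ends with 3 + 3 = 6 = MIN_LEN + 1 elements
```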
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
new file mode 100644
index 000000000..cacbd54b6
--- /dev/null
+++ b/library/alloc/src/collections/btree/map.rs
@@ -0,0 +1,2423 @@
+use crate::vec::Vec;
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+use core::fmt::{self, Debug};
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator};
+use core::marker::PhantomData;
+use core::mem::{self, ManuallyDrop};
+use core::ops::{Index, RangeBounds};
+use core::ptr;
+
+use crate::alloc::{Allocator, Global};
+
+use super::borrow::DormantMutRef;
+use super::dedup_sorted_iter::DedupSortedIter;
+use super::navigate::{LazyLeafRange, LeafRange};
+use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
+use super::search::SearchResult::*;
+use super::set_val::SetValZST;
+
+mod entry;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use entry::{Entry, OccupiedEntry, OccupiedError, VacantEntry};
+
+use Entry::*;
+
+/// Minimum number of elements in a node that is not a root.
+/// We might temporarily have fewer elements during methods.
+pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
+
+// A tree in a `BTreeMap` is a tree in the `node` module with additional invariants:
+// - Keys must appear in ascending order (according to the key's type).
+// - Every non-leaf node contains at least 1 element (has at least 2 children).
+// - Every non-root node contains at least MIN_LEN elements.
+//
+// An empty map is represented either by the absence of a root node or by a
+// root node that is an empty leaf.
+
+/// An ordered map based on a [B-Tree].
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
+/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of
+/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
+/// is done is *very* inefficient for modern computer architectures. In particular, every element
+/// is stored in its own individually heap-allocated node. This means that every single insertion
+/// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these
+/// are both notably expensive things to do in practice, we are forced to at very least reconsider
+/// the BST strategy.
+///
+/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
+/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
+/// searches. However, this does mean that searches will have to do *more* comparisons on average.
+/// The precise number of comparisons depends on the node search strategy used. For optimal cache
+/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
+/// the node using binary search. As a compromise, one could also perform a linear search
+/// that initially only checks every i<sup>th</sup> element for some choice of i.
+///
+/// Currently, our implementation simply performs naive linear search. This provides excellent
+/// performance on *small* nodes of elements which are cheap to compare. However in the future we
+/// would like to further explore choosing the optimal search strategy based on the choice of B,
+/// and possibly other factors. Using linear search, searching for a random element is expected
+/// to take B * log(n) comparisons, which is generally worse than a BST. In practice,
+/// however, performance is excellent.
+///
+/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
+/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+/// The behavior resulting from such a logic error is not specified, but will be encapsulated to the
+/// `BTreeMap` that observed the logic error and not result in undefined behavior. This could
+/// include panics, incorrect results, aborts, memory leaks, and non-termination.
+///
+/// Iterators obtained from functions such as [`BTreeMap::iter`], [`BTreeMap::values`], or
+/// [`BTreeMap::keys`] produce their items in order by key, and take worst-case logarithmic and
+/// amortized constant time per item returned.
+///
+/// [B-Tree]: https://en.wikipedia.org/wiki/B-tree
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, &str>` in this example).
+/// let mut movie_reviews = BTreeMap::new();
+///
+/// // review some movies.
+/// movie_reviews.insert("Office Space", "Deals with real issues in the workplace.");
+/// movie_reviews.insert("Pulp Fiction", "Masterpiece.");
+/// movie_reviews.insert("The Godfather", "Very enjoyable.");
+/// movie_reviews.insert("The Blues Brothers", "Eye lyked it a lot.");
+///
+/// // check for a specific one.
+/// if !movie_reviews.contains_key("Les Misérables") {
+/// println!("We've got {} reviews, but Les Misérables ain't one.",
+/// movie_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// movie_reviews.remove("The Blues Brothers");
+///
+/// // look up the values associated with some keys.
+/// let to_find = ["Up!", "Office Space"];
+/// for movie in &to_find {
+/// match movie_reviews.get(movie) {
+/// Some(review) => println!("{movie}: {review}"),
+/// None => println!("{movie} is unreviewed.")
+/// }
+/// }
+///
+/// // Look up the value for a key (will panic if the key is not found).
+/// println!("Movie review: {}", movie_reviews["Office Space"]);
+///
+/// // iterate over everything.
+/// for (movie, review) in &movie_reviews {
+/// println!("{movie}: \"{review}\"");
+/// }
+/// ```
+///
+/// A `BTreeMap` with a known list of items can be initialized from an array:
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// let solar_distance = BTreeMap::from([
+/// ("Mercury", 0.4),
+/// ("Venus", 0.7),
+/// ("Earth", 1.0),
+/// ("Mars", 1.5),
+/// ]);
+/// ```
+///
+/// `BTreeMap` implements an [`Entry API`], which allows for complex
+/// methods of getting, setting, updating and removing keys and their values:
+///
+/// [`Entry API`]: BTreeMap::entry
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, u8>` in this example).
+/// let mut player_stats = BTreeMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+/// // could actually return some random value here - let's just return
+/// // some fixed value for now
+/// 42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+///
+/// // modify an entry before an insert with in-place mutation
+/// player_stats.entry("mana").and_modify(|mana| *mana += 200).or_insert(100);
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeMap")]
+#[rustc_insignificant_dtor]
+pub struct BTreeMap<
+ K,
+ V,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ root: Option<Root<K, V>>,
+ length: usize,
+ /// `ManuallyDrop` to control drop order (needs to be dropped after all the nodes).
+ pub(super) alloc: ManuallyDrop<A>,
+ // For dropck; the `Box` avoids making the `Unpin` impl more strict than before
+ _marker: PhantomData<crate::boxed::Box<(K, V)>>,
+}
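
A back-of-envelope check of the `B * log(n)` claim in the doc comment above, assuming B = 6 (the branching constant this implementation uses) and taking the tree height as log base B. Purely illustrative, not from the source:

```
let b = 6.0_f64;
let n = 1_000_000.0_f64;
let height = n.log(b);        // ≈ 7.7 levels
let linear_scan = b * height; // ≈ 46 comparisons with naive linear search
let bst = n.log2();           // ≈ 20 comparisons for a perfectly balanced BST
assert!(linear_scan > bst);   // more comparisons, but far better locality
```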
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator + Clone> Drop for BTreeMap<K, V, A> {
+ fn drop(&mut self) {
+ drop(unsafe { ptr::read(self) }.into_iter())
+ }
+}
+
+// FIXME: This implementation is "wrong", but changing it would be a breaking change.
+// (The bounds of the automatic `UnwindSafe` implementation have been like this since Rust 1.50.)
+// Maybe we can fix it nonetheless with a crater run, or if the `UnwindSafe`
+// traits are deprecated, or disarmed (no longer causing hard errors) in the future.
+#[stable(feature = "btree_unwindsafe", since = "1.64.0")]
+impl<K, V, A: Allocator + Clone> core::panic::UnwindSafe for BTreeMap<K, V, A>
+where
+ A: core::panic::UnwindSafe,
+ K: core::panic::RefUnwindSafe,
+ V: core::panic::RefUnwindSafe,
+{
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
+ fn clone(&self) -> BTreeMap<K, V, A> {
+ fn clone_subtree<'a, K: Clone, V: Clone, A: Allocator + Clone>(
+ node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+ alloc: A,
+ ) -> BTreeMap<K, V, A>
+ where
+ K: 'a,
+ V: 'a,
+ {
+ match node.force() {
+ Leaf(leaf) => {
+ let mut out_tree = BTreeMap {
+ root: Some(Root::new(alloc.clone())),
+ length: 0,
+ alloc: ManuallyDrop::new(alloc),
+ _marker: PhantomData,
+ };
+
+ {
+ let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
+ let mut out_node = match root.borrow_mut().force() {
+ Leaf(leaf) => leaf,
+ Internal(_) => unreachable!(),
+ };
+
+ let mut in_edge = leaf.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ out_node.push(k.clone(), v.clone());
+ out_tree.length += 1;
+ }
+ }
+
+ out_tree
+ }
+ Internal(internal) => {
+ let mut out_tree =
+ clone_subtree(internal.first_edge().descend(), alloc.clone());
+
+ {
+ let out_root = out_tree.root.as_mut().unwrap();
+ let mut out_node = out_root.push_internal_level(alloc.clone());
+ let mut in_edge = internal.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ let k = (*k).clone();
+ let v = (*v).clone();
+ let subtree = clone_subtree(in_edge.descend(), alloc.clone());
+
+ // We can't destructure subtree directly
+ // because BTreeMap implements Drop
+ let (subroot, sublength) = unsafe {
+ let subtree = ManuallyDrop::new(subtree);
+ let root = ptr::read(&subtree.root);
+ let length = subtree.length;
+ (root, length)
+ };
+
+ out_node.push(
+ k,
+ v,
+ subroot.unwrap_or_else(|| Root::new(alloc.clone())),
+ );
+ out_tree.length += 1 + sublength;
+ }
+ }
+
+ out_tree
+ }
+ }
+ }
+
+ if self.is_empty() {
+ BTreeMap::new_in((*self.alloc).clone())
+ } else {
+ clone_subtree(self.root.as_ref().unwrap().reborrow(), (*self.alloc).clone()) // unwrap succeeds because not empty
+ }
+ }
+}
+
+impl<K, Q: ?Sized, A: Allocator + Clone> super::Recover<Q> for BTreeMap<K, SetValZST, A>
+where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+{
+ type Key = K;
+
+ fn get(&self, key: &Q) -> Option<&K> {
+ let root_node = self.root.as_ref()?.reborrow();
+ match root_node.search_tree(key) {
+ Found(handle) => Some(handle.into_kv().0),
+ GoDown(_) => None,
+ }
+ }
+
+ fn take(&mut self, key: &Q) -> Option<K> {
+ let (map, dormant_map) = DormantMutRef::new(self);
+ let root_node = map.root.as_mut()?.borrow_mut();
+ match root_node.search_tree(key) {
+ Found(handle) => Some(
+ OccupiedEntry {
+ handle,
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ }
+ .remove_kv()
+ .0,
+ ),
+ GoDown(_) => None,
+ }
+ }
+
+ fn replace(&mut self, key: K) -> Option<K> {
+ let (map, dormant_map) = DormantMutRef::new(self);
+ let root_node =
+ map.root.get_or_insert_with(|| Root::new((*map.alloc).clone())).borrow_mut();
+ match root_node.search_tree::<K>(&key) {
+ Found(mut kv) => Some(mem::replace(kv.key_mut(), key)),
+ GoDown(handle) => {
+ VacantEntry {
+ key,
+ handle: Some(handle),
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ }
+ .insert(SetValZST::default());
+ None
+ }
+ }
+ }
+}
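
These `Recover` methods back `BTreeSet`'s key-recovering API (`get`, `take`, `replace`); the set is a `BTreeMap<T, SetValZST>` underneath. Their observable behavior through the public set API:

```
use std::collections::BTreeSet;

let mut set = BTreeSet::from([String::from("key")]);

// `replace` stores the new (equal) key and hands back the old one.
assert_eq!(set.replace(String::from("key")).as_deref(), Some("key"));

// `take` removes the entry and recovers the stored key itself.
assert_eq!(set.take("key").as_deref(), Some("key"));
assert!(set.is_empty());
```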
+
+/// An iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: BTreeMap::iter
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, K: 'a, V: 'a> {
+ range: LazyLeafRange<marker::Immut<'a>, K, V>,
+ length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: BTreeMap::iter_mut
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, K: 'a, V: 'a> {
+ range: LazyLeafRange<marker::ValMut<'a>, K, V>,
+ length: usize,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IterMut<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let range = Iter { range: self.range.reborrow(), length: self.length };
+ f.debug_list().entries(range).finish()
+ }
+}
+
+/// An owning iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`]
+/// (provided by the [`IntoIterator`] trait). See its documentation for more.
+///
+/// [`into_iter`]: IntoIterator::into_iter
+/// [`IntoIterator`]: core::iter::IntoIterator
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_insignificant_dtor]
+pub struct IntoIter<
+ K,
+ V,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ range: LazyLeafRange<marker::Dying, K, V>,
+ length: usize,
+ /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`.
+ alloc: A,
+}
+
+impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
+ /// Returns an iterator of references over the remaining items.
+ #[inline]
+ pub(super) fn iter(&self) -> Iter<'_, K, V> {
+ Iter { range: self.range.reborrow(), length: self.length }
+ }
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: Debug, V: Debug, A: Allocator + Clone> Debug for IntoIter<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+/// An iterator over the keys of a `BTreeMap`.
+///
+/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: BTreeMap::keys
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Keys<'a, K, V> {
+ inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// An iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: BTreeMap::values
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Values<'a, K, V> {
+ inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: BTreeMap::values_mut
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+pub struct ValuesMut<'a, K, V> {
+ inner: IterMut<'a, K, V>,
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<K, V: fmt::Debug> fmt::Debug for ValuesMut<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
+ }
+}
+
+/// An owning iterator over the keys of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_keys`] method on [`BTreeMap`].
+/// See its documentation for more.
+///
+/// [`into_keys`]: BTreeMap::into_keys
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+pub struct IntoKeys<K, V, A: Allocator + Clone = Global> {
+ inner: IntoIter<K, V, A>,
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K: fmt::Debug, V, A: Allocator + Clone> fmt::Debug for IntoKeys<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish()
+ }
+}
+
+/// An owning iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_values`] method on [`BTreeMap`].
+/// See its documentation for more.
+///
+/// [`into_values`]: BTreeMap::into_values
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+pub struct IntoValues<
+ K,
+ V,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ inner: IntoIter<K, V, A>,
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V: fmt::Debug, A: Allocator + Clone> fmt::Debug for IntoValues<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
+ }
+}
+
+/// An iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range`]: BTreeMap::range
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, K: 'a, V: 'a> {
+ inner: LeafRange<marker::Immut<'a>, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range_mut`]: BTreeMap::range_mut
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct RangeMut<'a, K: 'a, V: 'a> {
+ inner: LeafRange<marker::ValMut<'a>, K, V>,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let range = Range { inner: self.inner.reborrow() };
+ f.debug_list().entries(range).finish()
+ }
+}
+
+impl<K, V> BTreeMap<K, V> {
+ /// Makes a new, empty `BTreeMap`.
+ ///
+ /// Does not allocate anything on its own.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ ///
+ /// // entries can now be inserted into the empty map
+ /// map.insert(1, "a");
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[must_use]
+ pub const fn new() -> BTreeMap<K, V> {
+ BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(Global), _marker: PhantomData }
+ }
+}
+
+impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
+ /// Clears the map, removing all elements.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.clear();
+ /// assert!(a.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ // avoid moving the allocator
+ mem::drop(BTreeMap {
+ root: mem::replace(&mut self.root, None),
+ length: mem::replace(&mut self.length, 0),
+ alloc: self.alloc.clone(),
+ _marker: PhantomData,
+ });
+ }
+
+ /// Makes a new empty BTreeMap with a reasonable choice for B.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// # #![feature(allocator_api)]
+ /// # #![feature(btreemap_alloc)]
+ /// use std::collections::BTreeMap;
+ /// use std::alloc::Global;
+ ///
+ /// let mut map = BTreeMap::new_in(Global);
+ ///
+ /// // entries can now be inserted into the empty map
+ /// map.insert(1, "a");
+ /// ```
+ #[unstable(feature = "btreemap_alloc", issue = "32838")]
+ pub fn new_in(alloc: A) -> BTreeMap<K, V, A> {
+ BTreeMap { root: None, length: 0, alloc: ManuallyDrop::new(alloc), _marker: PhantomData }
+ }
+}
+
+impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let root_node = self.root.as_ref()?.reborrow();
+ match root_node.search_tree(key) {
+ Found(handle) => Some(handle.into_kv().1),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Returns the key-value pair corresponding to the supplied key.
+ ///
+ /// The supplied key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+ /// assert_eq!(map.get_key_value(&2), None);
+ /// ```
+ #[stable(feature = "map_get_key_value", since = "1.40.0")]
+ pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let root_node = self.root.as_ref()?.reborrow();
+ match root_node.search_tree(k) {
+ Found(handle) => Some(handle.into_kv()),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Returns the first key-value pair in the map.
+ /// The key in this pair is the minimum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// assert_eq!(map.first_key_value(), None);
+ /// map.insert(1, "b");
+ /// map.insert(2, "a");
+ /// assert_eq!(map.first_key_value(), Some((&1, &"b")));
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn first_key_value(&self) -> Option<(&K, &V)>
+ where
+ K: Ord,
+ {
+ let root_node = self.root.as_ref()?.reborrow();
+ root_node.first_leaf_edge().right_kv().ok().map(Handle::into_kv)
+ }
+
+ /// Returns the first entry in the map for in-place manipulation.
+ /// The key of this entry is the minimum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// if let Some(mut entry) = map.first_entry() {
+ /// if *entry.key() > 0 {
+ /// entry.insert("first");
+ /// }
+ /// }
+ /// assert_eq!(*map.get(&1).unwrap(), "first");
+ /// assert_eq!(*map.get(&2).unwrap(), "b");
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V, A>>
+ where
+ K: Ord,
+ {
+ let (map, dormant_map) = DormantMutRef::new(self);
+ let root_node = map.root.as_mut()?.borrow_mut();
+ let kv = root_node.first_leaf_edge().right_kv().ok()?;
+ Some(OccupiedEntry {
+ handle: kv.forget_node_type(),
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ })
+ }
+
+ /// Removes and returns the first element in the map.
+ /// The key of this element is the minimum key that was in the map.
+ ///
+ /// # Examples
+ ///
+ /// Draining elements in ascending order, while keeping a usable map each iteration.
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// while let Some((key, _val)) = map.pop_first() {
+ /// assert!(map.iter().all(|(k, _v)| *k > key));
+ /// }
+ /// assert!(map.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_first(&mut self) -> Option<(K, V)>
+ where
+ K: Ord,
+ {
+ self.first_entry().map(|entry| entry.remove_entry())
+ }
+
+ /// Returns the last key-value pair in the map.
+ /// The key in this pair is the maximum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "b");
+ /// map.insert(2, "a");
+ /// assert_eq!(map.last_key_value(), Some((&2, &"a")));
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn last_key_value(&self) -> Option<(&K, &V)>
+ where
+ K: Ord,
+ {
+ let root_node = self.root.as_ref()?.reborrow();
+ root_node.last_leaf_edge().left_kv().ok().map(Handle::into_kv)
+ }
+
+ /// Returns the last entry in the map for in-place manipulation.
+ /// The key of this entry is the maximum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// if let Some(mut entry) = map.last_entry() {
+ /// if *entry.key() > 0 {
+ /// entry.insert("last");
+ /// }
+ /// }
+ /// assert_eq!(*map.get(&1).unwrap(), "a");
+ /// assert_eq!(*map.get(&2).unwrap(), "last");
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V, A>>
+ where
+ K: Ord,
+ {
+ let (map, dormant_map) = DormantMutRef::new(self);
+ let root_node = map.root.as_mut()?.borrow_mut();
+ let kv = root_node.last_leaf_edge().left_kv().ok()?;
+ Some(OccupiedEntry {
+ handle: kv.forget_node_type(),
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ })
+ }
+
+ /// Removes and returns the last element in the map.
+ /// The key of this element is the maximum key that was in the map.
+ ///
+ /// # Examples
+ ///
+ /// Draining elements in descending order, while keeping a usable map each iteration.
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// while let Some((key, _val)) = map.pop_last() {
+ /// assert!(map.iter().all(|(k, _v)| *k < key));
+ /// }
+ /// assert!(map.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_last(&mut self) -> Option<(K, V)>
+ where
+ K: Ord,
+ {
+ self.last_entry().map(|entry| entry.remove_entry())
+ }
+
+ /// Returns `true` if the map contains a value for the specified key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.contains_key(&1), true);
+ /// assert_eq!(map.contains_key(&2), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ self.get(key).is_some()
+ }
+
+ /// Returns a mutable reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// if let Some(x) = map.get_mut(&1) {
+ /// *x = "b";
+ /// }
+ /// assert_eq!(map[&1], "b");
+ /// ```
+ // See `get` for implementation notes, this is basically a copy-paste with mut's added
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let root_node = self.root.as_mut()?.borrow_mut();
+ match root_node.search_tree(key) {
+ Found(handle) => Some(handle.into_val_mut()),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Inserts a key-value pair into the map.
+ ///
+ /// If the map did not have this key present, `None` is returned.
+ ///
+ /// If the map did have this key present, the value is updated, and the old
+ /// value is returned. The key is not updated, though; this matters for
+ /// types that can be `==` without being identical. See the [module-level
+ /// documentation] for more.
+ ///
+ /// [module-level documentation]: index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// assert_eq!(map.insert(37, "a"), None);
+ /// assert_eq!(map.is_empty(), false);
+ ///
+ /// map.insert(37, "b");
+ /// assert_eq!(map.insert(37, "c"), Some("b"));
+ /// assert_eq!(map[&37], "c");
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, key: K, value: V) -> Option<V>
+ where
+ K: Ord,
+ {
+ match self.entry(key) {
+ Occupied(mut entry) => Some(entry.insert(value)),
+ Vacant(entry) => {
+ entry.insert(value);
+ None
+ }
+ }
+ }
+
+ /// Tries to insert a key-value pair into the map, and returns
+ /// a mutable reference to the value in the entry.
+ ///
+ /// If the map already had this key present, nothing is updated, and
+ /// an error containing the occupied entry and the value is returned.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_try_insert)]
+ ///
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// assert_eq!(map.try_insert(37, "a").unwrap(), &"a");
+ ///
+ /// let err = map.try_insert(37, "b").unwrap_err();
+ /// assert_eq!(err.entry.key(), &37);
+ /// assert_eq!(err.entry.get(), &"a");
+ /// assert_eq!(err.value, "b");
+ /// ```
+ #[unstable(feature = "map_try_insert", issue = "82766")]
+ pub fn try_insert(&mut self, key: K, value: V) -> Result<&mut V, OccupiedError<'_, K, V, A>>
+ where
+ K: Ord,
+ {
+ match self.entry(key) {
+ Occupied(entry) => Err(OccupiedError { entry, value }),
+ Vacant(entry) => Ok(entry.insert(value)),
+ }
+ }
+
+ /// Removes a key from the map, returning the value at the key if the key
+ /// was previously in the map.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.remove(&1), Some("a"));
+ /// assert_eq!(map.remove(&1), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ self.remove_entry(key).map(|(_, v)| v)
+ }
+
+ /// Removes a key from the map, returning the stored key and value if the key
+ /// was previously in the map.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
+ /// assert_eq!(map.remove_entry(&1), None);
+ /// ```
+ #[stable(feature = "btreemap_remove_entry", since = "1.45.0")]
+ pub fn remove_entry<Q: ?Sized>(&mut self, key: &Q) -> Option<(K, V)>
+ where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ let (map, dormant_map) = DormantMutRef::new(self);
+ let root_node = map.root.as_mut()?.borrow_mut();
+ match root_node.search_tree(key) {
+ Found(handle) => Some(
+ OccupiedEntry {
+ handle,
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ }
+ .remove_entry(),
+ ),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all pairs `(k, v)` for which `f(&k, &mut v)` returns `false`.
+ /// The elements are visited in ascending key order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x*10)).collect();
+ /// // Keep only the elements with even-numbered keys.
+ /// map.retain(|&k, _| k % 2 == 0);
+ /// assert!(map.into_iter().eq(vec![(0, 0), (2, 20), (4, 40), (6, 60)]));
+ /// ```
+ #[inline]
+ #[stable(feature = "btree_retain", since = "1.53.0")]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ K: Ord,
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ self.drain_filter(|k, v| !f(k, v));
+ }
+
+ /// Moves all elements from `other` into `self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ ///
+ /// let mut b = BTreeMap::new();
+ /// b.insert(3, "d");
+ /// b.insert(4, "e");
+ /// b.insert(5, "f");
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.len(), 5);
+ /// assert_eq!(b.len(), 0);
+ ///
+ /// assert_eq!(a[&1], "a");
+ /// assert_eq!(a[&2], "b");
+ /// assert_eq!(a[&3], "d");
+ /// assert_eq!(a[&4], "e");
+ /// assert_eq!(a[&5], "f");
+ /// ```
+ #[stable(feature = "btree_append", since = "1.11.0")]
+ pub fn append(&mut self, other: &mut Self)
+ where
+ K: Ord,
+ A: Clone,
+ {
+ // Do we have to append anything at all?
+ if other.is_empty() {
+ return;
+ }
+
+ // We can just swap `self` and `other` if `self` is empty.
+ if self.is_empty() {
+ mem::swap(self, other);
+ return;
+ }
+
+ let self_iter = mem::replace(self, Self::new_in((*self.alloc).clone())).into_iter();
+ let other_iter = mem::replace(other, Self::new_in((*self.alloc).clone())).into_iter();
+ let root = self.root.get_or_insert_with(|| Root::new((*self.alloc).clone()));
+ root.append_from_sorted_iters(
+ self_iter,
+ other_iter,
+ &mut self.length,
+ (*self.alloc).clone(),
+ )
+ }
+
+ /// Constructs a double-ended iterator over a sub-range of elements in the map.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+ /// Panics if range `start > end`.
+ /// Panics if range `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound::Included;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(3, "a");
+ /// map.insert(5, "b");
+ /// map.insert(8, "c");
+ /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
+ /// println!("{key}: {value}");
+ /// }
+ /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
+ /// ```
+ #[stable(feature = "btree_range", since = "1.17.0")]
+ pub fn range<T: ?Sized, R>(&self, range: R) -> Range<'_, K, V>
+ where
+ T: Ord,
+ K: Borrow<T> + Ord,
+ R: RangeBounds<T>,
+ {
+ if let Some(root) = &self.root {
+ Range { inner: root.reborrow().range_search(range) }
+ } else {
+ Range { inner: LeafRange::none() }
+ }
+ }
+
+ /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+ /// Panics if range `start > end`.
+ /// Panics if range `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, i32> =
+ /// [("Alice", 0), ("Bob", 0), ("Carol", 0), ("Cheryl", 0)].into();
+ /// for (_, balance) in map.range_mut("B".."Cheryl") {
+ /// *balance += 100;
+ /// }
+ /// for (name, balance) in &map {
+ /// println!("{name} => {balance}");
+ /// }
+ /// ```
+ #[stable(feature = "btree_range", since = "1.17.0")]
+ pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<'_, K, V>
+ where
+ T: Ord,
+ K: Borrow<T> + Ord,
+ R: RangeBounds<T>,
+ {
+ if let Some(root) = &mut self.root {
+ RangeMut { inner: root.borrow_valmut().range_search(range), _marker: PhantomData }
+ } else {
+ RangeMut { inner: LeafRange::none(), _marker: PhantomData }
+ }
+ }
+
+ /// Gets the given key's corresponding entry in the map for in-place manipulation.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// // count the number of occurrences of letters in the vec
+ /// for x in ["a", "b", "a", "c", "a", "b"] {
+ /// count.entry(x).and_modify(|curr| *curr += 1).or_insert(1);
+ /// }
+ ///
+ /// assert_eq!(count["a"], 3);
+ /// assert_eq!(count["b"], 2);
+ /// assert_eq!(count["c"], 1);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V, A>
+ where
+ K: Ord,
+ {
+ let (map, dormant_map) = DormantMutRef::new(self);
+ match map.root {
+ None => Vacant(VacantEntry {
+ key,
+ handle: None,
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ }),
+ Some(ref mut root) => match root.borrow_mut().search_tree(&key) {
+ Found(handle) => Occupied(OccupiedEntry {
+ handle,
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ }),
+ GoDown(handle) => Vacant(VacantEntry {
+ key,
+ handle: Some(handle),
+ dormant_map,
+ alloc: (*map.alloc).clone(),
+ _marker: PhantomData,
+ }),
+ },
+ }
+ }
+
+ /// Splits the collection into two at the given key. Returns everything after the given key,
+ /// including the key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(17, "d");
+ /// a.insert(41, "e");
+ ///
+ /// let b = a.split_off(&3);
+ ///
+ /// assert_eq!(a.len(), 2);
+ /// assert_eq!(b.len(), 3);
+ ///
+ /// assert_eq!(a[&1], "a");
+ /// assert_eq!(a[&2], "b");
+ ///
+ /// assert_eq!(b[&3], "c");
+ /// assert_eq!(b[&17], "d");
+ /// assert_eq!(b[&41], "e");
+ /// ```
+ #[stable(feature = "btree_split_off", since = "1.11.0")]
+ pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+ where
+ K: Borrow<Q> + Ord,
+ A: Clone,
+ {
+ if self.is_empty() {
+ return Self::new_in((*self.alloc).clone());
+ }
+
+ let total_num = self.len();
+ let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty
+
+ let right_root = left_root.split_off(key, (*self.alloc).clone());
+
+ let (new_left_len, right_len) = Root::calc_split_length(total_num, &left_root, &right_root);
+ self.length = new_left_len;
+
+ BTreeMap {
+ root: Some(right_root),
+ length: right_len,
+ alloc: self.alloc.clone(),
+ _marker: PhantomData,
+ }
+ }
+
+ /// Creates an iterator that visits all elements (key-value pairs) in
+ /// ascending key order and uses a closure to determine if an element should
+ /// be removed. If the closure returns `true`, the element is removed from
+ /// the map and yielded. If the closure returns `false`, or panics, the
+ /// element remains in the map and will not be yielded.
+ ///
+ /// The iterator also lets you mutate the value of each element in the
+ /// closure, regardless of whether you choose to keep or remove it.
+ ///
+ /// If the iterator is only partially consumed or not consumed at all, each
+ /// of the remaining elements is still subjected to the closure, which may
+ /// change its value and, by returning `true`, have the element removed and
+ /// dropped.
+ ///
+ /// It is unspecified how many more elements will be subjected to the
+ /// closure if a panic occurs in the closure, or a panic occurs while
+ /// dropping an element, or if the `DrainFilter` value is leaked.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a map into even and odd keys, reusing the original map:
+ ///
+ /// ```
+ /// #![feature(btree_drain_filter)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+ /// let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+ /// let odds = map;
+ /// assert_eq!(evens.keys().copied().collect::<Vec<_>>(), [0, 2, 4, 6]);
+ /// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), [1, 3, 5, 7]);
+ /// ```
+ #[unstable(feature = "btree_drain_filter", issue = "70530")]
+ pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F, A>
+ where
+ K: Ord,
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ let (inner, alloc) = self.drain_filter_inner();
+ DrainFilter { pred, inner, alloc }
+ }
+
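+    /// Shared guts of `drain_filter`, also used by `BTreeSet`: hands out the
+    /// borrowed iteration state along with a clone of the allocator, which
+    /// node removal needs.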
+ pub(super) fn drain_filter_inner(&mut self) -> (DrainFilterInner<'_, K, V>, A)
+ where
+ K: Ord,
+ {
+ if let Some(root) = self.root.as_mut() {
+ let (root, dormant_root) = DormantMutRef::new(root);
+ let front = root.borrow_mut().first_leaf_edge();
+ (
+ DrainFilterInner {
+ length: &mut self.length,
+ dormant_root: Some(dormant_root),
+ cur_leaf_edge: Some(front),
+ },
+ (*self.alloc).clone(),
+ )
+ } else {
+ (
+ DrainFilterInner {
+ length: &mut self.length,
+ dormant_root: None,
+ cur_leaf_edge: None,
+ },
+ (*self.alloc).clone(),
+ )
+ }
+ }
+
+ /// Creates a consuming iterator visiting all the keys, in sorted order.
+ /// The map cannot be used after calling this.
+ /// The iterator element type is `K`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(2, "b");
+ /// a.insert(1, "a");
+ ///
+ /// let keys: Vec<i32> = a.into_keys().collect();
+ /// assert_eq!(keys, [1, 2]);
+ /// ```
+ #[inline]
+ #[stable(feature = "map_into_keys_values", since = "1.54.0")]
+ pub fn into_keys(self) -> IntoKeys<K, V, A> {
+ IntoKeys { inner: self.into_iter() }
+ }
+
+ /// Creates a consuming iterator visiting all the values, in order by key.
+ /// The map cannot be used after calling this.
+ /// The iterator element type is `V`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "hello");
+ /// a.insert(2, "goodbye");
+ ///
+ /// let values: Vec<&str> = a.into_values().collect();
+ /// assert_eq!(values, ["hello", "goodbye"]);
+ /// ```
+ #[inline]
+ #[stable(feature = "map_into_keys_values", since = "1.54.0")]
+ pub fn into_values(self) -> IntoValues<K, V, A> {
+ IntoValues { inner: self.into_iter() }
+ }
+
+ /// Makes a `BTreeMap` from a sorted iterator.
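+    ///
+    /// The iterator must produce keys in ascending order; runs of equal keys
+    /// are collapsed by `DedupSortedIter`, which yields only the last pair of
+    /// each run, matching the overwrite semantics of repeated `insert` calls.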
+ pub(crate) fn bulk_build_from_sorted_iter<I>(iter: I, alloc: A) -> Self
+ where
+ K: Ord,
+ I: IntoIterator<Item = (K, V)>,
+ {
+ let mut root = Root::new(alloc.clone());
+ let mut length = 0;
+ root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone());
+ BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc), _marker: PhantomData }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V, A: Allocator + Clone> IntoIterator for &'a BTreeMap<K, V, A> {
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ fn into_iter(self) -> Iter<'a, K, V> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
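+            // SAFETY: `length` counted the remaining elements before the
+            // decrement, so the underlying range is known to be non-empty.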
+ Some(unsafe { self.range.next_unchecked() })
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.range.next_back_unchecked() })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Iter<'_, K, V> {
+ fn clone(&self) -> Self {
+ Iter { range: self.range.clone(), length: self.length }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V, A: Allocator + Clone> IntoIterator for &'a mut BTreeMap<K, V, A> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for IterMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.range.next_unchecked() })
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.range.next_back_unchecked() })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+impl<'a, K, V> IterMut<'a, K, V> {
+ /// Returns an iterator of references over the remaining items.
+ #[inline]
+ pub(super) fn iter(&self) -> Iter<'_, K, V> {
+ Iter { range: self.range.reborrow(), length: self.length }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V, A: Allocator + Clone> IntoIterator for BTreeMap<K, V, A> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V, A>;
+
+ fn into_iter(self) -> IntoIter<K, V, A> {
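+        // Wrap the map in `ManuallyDrop` so its own destructor does not run:
+        // the root and allocator are moved into the iterator, which takes over
+        // responsibility for dropping elements and deallocating nodes.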
+ let mut me = ManuallyDrop::new(self);
+ if let Some(root) = me.root.take() {
+ let full_range = root.into_dying().full_range();
+
+ IntoIter {
+ range: full_range,
+ length: me.length,
+ alloc: unsafe { ManuallyDrop::take(&mut me.alloc) },
+ }
+ } else {
+ IntoIter {
+ range: LazyLeafRange::none(),
+ length: 0,
+ alloc: unsafe { ManuallyDrop::take(&mut me.alloc) },
+ }
+ }
+ }
+}
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V, A: Allocator + Clone> Drop for IntoIter<K, V, A> {
+ fn drop(&mut self) {
+ struct DropGuard<'a, K, V, A: Allocator + Clone>(&'a mut IntoIter<K, V, A>);
+
+ impl<'a, K, V, A: Allocator + Clone> Drop for DropGuard<'a, K, V, A> {
+ fn drop(&mut self) {
+ // Continue the same loop we perform below. This only runs when unwinding, so we
+ // don't have to care about panics this time (they'll abort).
+ while let Some(kv) = self.0.dying_next() {
+ // SAFETY: we consume the dying handle immediately.
+ unsafe { kv.drop_key_val() };
+ }
+ }
+ }
+
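+        // Drop the remaining elements one by one. If any of their destructors
+        // panics, the guard above resumes the traversal and finishes dropping
+        // and deallocating the rest.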
+ while let Some(kv) = self.dying_next() {
+ let guard = DropGuard(self);
+ // SAFETY: we don't touch the tree before consuming the dying handle.
+ unsafe { kv.drop_key_val() };
+ mem::forget(guard);
+ }
+ }
+}
+
+impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
+ /// Core of a `next` method returning a dying KV handle,
+ /// invalidated by further calls to this function and some others.
+ fn dying_next(
+ &mut self,
+ ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
+ if self.length == 0 {
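+            // All elements have been yielded already; all that is left to free
+            // is the tree structure itself.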
+ self.range.deallocating_end(self.alloc.clone());
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.range.deallocating_next_unchecked(self.alloc.clone()) })
+ }
+ }
+
+ /// Core of a `next_back` method returning a dying KV handle,
+ /// invalidated by further calls to this function and some others.
+ fn dying_next_back(
+ &mut self,
+ ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
+ if self.length == 0 {
+ self.range.deallocating_end(self.alloc.clone());
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.range.deallocating_next_back_unchecked(self.alloc.clone()) })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V, A: Allocator + Clone> Iterator for IntoIter<K, V, A> {
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ // SAFETY: we consume the dying handle immediately.
+ self.dying_next().map(unsafe { |kv| kv.into_key_val() })
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V, A: Allocator + Clone> DoubleEndedIterator for IntoIter<K, V, A> {
+ fn next_back(&mut self) -> Option<(K, V)> {
+ // SAFETY: we consume the dying handle immediately.
+ self.dying_next_back().map(unsafe { |kv| kv.into_key_val() })
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoIter<K, V, A> {
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V, A: Allocator + Clone> FusedIterator for IntoIter<K, V, A> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
+
+ fn next(&mut self) -> Option<&'a K> {
+ self.inner.next().map(|(k, _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a K> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<&'a K> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<&'a K> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a K> {
+ self.inner.next_back().map(|(k, _)| k)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Keys<'_, K, V> {
+ fn clone(&self) -> Self {
+ Keys { inner: self.inner.clone() }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
+
+ fn next(&mut self) -> Option<&'a V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a V> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Values<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Values<'_, K, V> {
+ fn clone(&self) -> Self {
+ Values { inner: self.inner.clone() }
+ }
+}
+
+/// An iterator produced by calling `drain_filter` on `BTreeMap`.
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+pub struct DrainFilter<
+ 'a,
+ K,
+ V,
+ F,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> where
+ F: 'a + FnMut(&K, &mut V) -> bool,
+{
+ pred: F,
+ inner: DrainFilterInner<'a, K, V>,
+    /// The BTreeMap will outlive this DrainFilter, so we don't care about drop order for `alloc`.
+ alloc: A,
+}
+/// Most of the implementation of `DrainFilter` is generic over the type
+/// of the predicate, and thus also serves `BTreeSet::DrainFilter`.
+pub(super) struct DrainFilterInner<'a, K, V> {
+ /// Reference to the length field in the borrowed map, updated live.
+ length: &'a mut usize,
+ /// Buried reference to the root field in the borrowed map.
+ /// Wrapped in `Option` to allow drop handler to `take` it.
+ dormant_root: Option<DormantMutRef<'a, Root<K, V>>>,
+ /// Contains a leaf edge preceding the next element to be returned, or the last leaf edge.
+ /// Empty if the map has no root, if iteration went beyond the last leaf edge,
+ /// or if a panic occurred in the predicate.
+ cur_leaf_edge: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F, A: Allocator + Clone> Drop for DrainFilter<'_, K, V, F, A>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ fn drop(&mut self) {
+ self.for_each(drop);
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> fmt::Debug for DrainFilter<'_, K, V, F>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+ F: FnMut(&K, &mut V) -> bool,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish()
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F, A: Allocator + Clone> Iterator for DrainFilter<'_, K, V, F, A>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ self.inner.next(&mut self.pred, self.alloc.clone())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DrainFilterInner<'a, K, V> {
+ /// Allow Debug implementations to predict the next element.
+ pub(super) fn peek(&self) -> Option<(&K, &V)> {
+ let edge = self.cur_leaf_edge.as_ref()?;
+ edge.reborrow().next_kv().ok().map(Handle::into_kv)
+ }
+
+ /// Implementation of a typical `DrainFilter::next` method, given the predicate.
+ pub(super) fn next<F, A: Allocator + Clone>(&mut self, pred: &mut F, alloc: A) -> Option<(K, V)>
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
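+        // Walk the leaf edges in order. `cur_leaf_edge` is `take`n first, so
+        // that a panic in the predicate leaves it `None` and halts iteration.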
+ while let Ok(mut kv) = self.cur_leaf_edge.take()?.next_kv() {
+ let (k, v) = kv.kv_mut();
+ if pred(k, v) {
+ *self.length -= 1;
+ let (kv, pos) = kv.remove_kv_tracking(
+ || {
+ // SAFETY: we will touch the root in a way that will not
+ // invalidate the position returned.
+ let root = unsafe { self.dormant_root.take().unwrap().awaken() };
+ root.pop_internal_level(alloc.clone());
+ self.dormant_root = Some(DormantMutRef::new(root).1);
+ },
+ alloc.clone(),
+ );
+ self.cur_leaf_edge = Some(pos);
+ return Some(kv);
+ }
+ self.cur_leaf_edge = Some(kv.next_leaf_edge());
+ }
+ None
+ }
+
+ /// Implementation of a typical `DrainFilter::size_hint` method.
+ pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
+ // In most of the btree iterators, `self.length` is the number of elements
+ // yet to be visited. Here, it includes elements that were visited and that
+        // the predicate decided not to drain. Making this upper bound tighter
+        // during iteration would require an extra field.
+ (0, Some(*self.length))
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ self.inner.next_checked()
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+ type Item = &'a mut V;
+
+ fn next(&mut self) -> Option<&'a mut V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a mut V> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a mut V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> Iterator for IntoKeys<K, V, A> {
+ type Item = K;
+
+ fn next(&mut self) -> Option<K> {
+ self.inner.next().map(|(k, _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<K> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<K> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<K> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> DoubleEndedIterator for IntoKeys<K, V, A> {
+ fn next_back(&mut self) -> Option<K> {
+ self.inner.next_back().map(|(k, _)| k)
+ }
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoKeys<K, V, A> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> FusedIterator for IntoKeys<K, V, A> {}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> Iterator for IntoValues<K, V, A> {
+ type Item = V;
+
+ fn next(&mut self) -> Option<V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<V> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> DoubleEndedIterator for IntoValues<K, V, A> {
+ fn next_back(&mut self) -> Option<V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoValues<K, V, A> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "map_into_keys_values", since = "1.54.0")]
+impl<K, V, A: Allocator + Clone> FusedIterator for IntoValues<K, V, A> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ self.inner.next_back_checked()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Range<'_, K, V> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<K, V> Clone for Range<'_, K, V> {
+ fn clone(&self) -> Self {
+ Range { inner: self.inner.clone() }
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ self.inner.next_checked()
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ self.inner.next_back_checked()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
+ fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
+ let mut inputs: Vec<_> = iter.into_iter().collect();
+
+ if inputs.is_empty() {
+ return BTreeMap::new();
+ }
+
+        // Use a stable sort to preserve the insertion order among equal keys,
+        // so that deduplication in `bulk_build_from_sorted_iter` keeps the
+        // last value inserted for each key, as repeated `insert` calls would.
+        inputs.sort_by(|a, b| a.0.cmp(&b.0));
+ BTreeMap::bulk_build_from_sorted_iter(inputs, Global)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V, A: Allocator + Clone> Extend<(K, V)> for BTreeMap<K, V, A> {
+ #[inline]
+ fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+ iter.into_iter().for_each(move |(k, v)| {
+ self.insert(k, v);
+ });
+ }
+
+ #[inline]
+ fn extend_one(&mut self, (k, v): (K, V)) {
+ self.insert(k, v);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, K: Ord + Copy, V: Copy, A: Allocator + Clone> Extend<(&'a K, &'a V)>
+ for BTreeMap<K, V, A>
+{
+ fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+ }
+
+ #[inline]
+ fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
+ self.insert(k, v);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Hash, V: Hash, A: Allocator + Clone> Hash for BTreeMap<K, V, A> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
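+        // Prefix the elements with the length, so that, e.g., a map whose
+        // entries form a prefix of another's feeds a distinct stream of data
+        // into the hasher.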
+ state.write_length_prefix(self.len());
+ for elt in self {
+ elt.hash(state);
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Default for BTreeMap<K, V> {
+ /// Creates an empty `BTreeMap`.
+ fn default() -> BTreeMap<K, V> {
+ BTreeMap::new()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: PartialEq, V: PartialEq, A: Allocator + Clone> PartialEq for BTreeMap<K, V, A> {
+ fn eq(&self, other: &BTreeMap<K, V, A>) -> bool {
+ self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Eq, V: Eq, A: Allocator + Clone> Eq for BTreeMap<K, V, A> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: PartialOrd, V: PartialOrd, A: Allocator + Clone> PartialOrd for BTreeMap<K, V, A> {
+ #[inline]
+ fn partial_cmp(&self, other: &BTreeMap<K, V, A>) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V: Ord, A: Allocator + Clone> Ord for BTreeMap<K, V, A> {
+ #[inline]
+ fn cmp(&self, other: &BTreeMap<K, V, A>) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Debug, V: Debug, A: Allocator + Clone> Debug for BTreeMap<K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, Q: ?Sized, V, A: Allocator + Clone> Index<&Q> for BTreeMap<K, V, A>
+where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+{
+ type Output = V;
+
+ /// Returns a reference to the value corresponding to the supplied key.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the key is not present in the `BTreeMap`.
+ #[inline]
+ fn index(&self, key: &Q) -> &V {
+ self.get(key).expect("no entry found for key")
+ }
+}
+
+#[stable(feature = "std_collections_from_array", since = "1.56.0")]
+impl<K: Ord, V, const N: usize> From<[(K, V); N]> for BTreeMap<K, V> {
+    /// Converts a `[(K, V); N]` into a `BTreeMap<K, V>`.
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let map1 = BTreeMap::from([(1, 2), (3, 4)]);
+ /// let map2: BTreeMap<_, _> = [(1, 2), (3, 4)].into();
+ /// assert_eq!(map1, map2);
+ /// ```
+ fn from(mut arr: [(K, V); N]) -> Self {
+ if N == 0 {
+ return BTreeMap::new();
+ }
+
+        // Use a stable sort to preserve the insertion order among equal keys,
+        // so that deduplication in `bulk_build_from_sorted_iter` keeps the
+        // last value inserted for each key, as repeated `insert` calls would.
+        arr.sort_by(|a, b| a.0.cmp(&b.0));
+ BTreeMap::bulk_build_from_sorted_iter(arr, Global)
+ }
+}
+
+impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
+ /// Gets an iterator over the entries of the map, sorted by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(3, "c");
+ /// map.insert(2, "b");
+ /// map.insert(1, "a");
+ ///
+ /// for (key, value) in map.iter() {
+ /// println!("{key}: {value}");
+ /// }
+ ///
+ /// let (first_key, first_value) = map.iter().next().unwrap();
+ /// assert_eq!((*first_key, *first_value), (1, "a"));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<'_, K, V> {
+ if let Some(root) = &self.root {
+ let full_range = root.reborrow().full_range();
+
+ Iter { range: full_range, length: self.length }
+ } else {
+ Iter { range: LazyLeafRange::none(), length: 0 }
+ }
+ }
+
+ /// Gets a mutable iterator over the entries of the map, sorted by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::from([
+ /// ("a", 1),
+ /// ("b", 2),
+ /// ("c", 3),
+ /// ]);
+ ///
+ /// // add 10 to the value if the key isn't "a"
+ /// for (key, value) in map.iter_mut() {
+ /// if key != &"a" {
+ /// *value += 10;
+ /// }
+ /// }
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
+ if let Some(root) = &mut self.root {
+ let full_range = root.borrow_valmut().full_range();
+
+ IterMut { range: full_range, length: self.length, _marker: PhantomData }
+ } else {
+ IterMut { range: LazyLeafRange::none(), length: 0, _marker: PhantomData }
+ }
+ }
+
+ /// Gets an iterator over the keys of the map, in sorted order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(2, "b");
+ /// a.insert(1, "a");
+ ///
+ /// let keys: Vec<_> = a.keys().cloned().collect();
+ /// assert_eq!(keys, [1, 2]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn keys(&self) -> Keys<'_, K, V> {
+ Keys { inner: self.iter() }
+ }
+
+ /// Gets an iterator over the values of the map, in order by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "hello");
+ /// a.insert(2, "goodbye");
+ ///
+ /// let values: Vec<&str> = a.values().cloned().collect();
+ /// assert_eq!(values, ["hello", "goodbye"]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn values(&self) -> Values<'_, K, V> {
+ Values { inner: self.iter() }
+ }
+
+ /// Gets a mutable iterator over the values of the map, in order by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, String::from("hello"));
+ /// a.insert(2, String::from("goodbye"));
+ ///
+ /// for value in a.values_mut() {
+ /// value.push_str("!");
+ /// }
+ ///
+ /// let values: Vec<String> = a.values().cloned().collect();
+ /// assert_eq!(values, [String::from("hello!"),
+ /// String::from("goodbye!")]);
+ /// ```
+ #[stable(feature = "map_values_mut", since = "1.10.0")]
+ pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
+ ValuesMut { inner: self.iter_mut() }
+ }
+
+ /// Returns the number of elements in the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// assert_eq!(a.len(), 0);
+ /// a.insert(1, "a");
+ /// assert_eq!(a.len(), 1);
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ pub const fn len(&self) -> usize {
+ self.length
+ }
+
+ /// Returns `true` if the map contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// assert!(a.is_empty());
+ /// a.insert(1, "a");
+ /// assert!(!a.is_empty());
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ pub const fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/library/alloc/src/collections/btree/map/entry.rs b/library/alloc/src/collections/btree/map/entry.rs
new file mode 100644
index 000000000..b6eecf9b0
--- /dev/null
+++ b/library/alloc/src/collections/btree/map/entry.rs
@@ -0,0 +1,555 @@
+use core::fmt::{self, Debug};
+use core::marker::PhantomData;
+use core::mem;
+
+use crate::alloc::{Allocator, Global};
+
+use super::super::borrow::DormantMutRef;
+use super::super::node::{marker, Handle, NodeRef};
+use super::BTreeMap;
+
+use Entry::*;
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
+///
+/// [`entry`]: BTreeMap::entry
+#[stable(feature = "rust1", since = "1.0.0")]
+#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeEntry")]
+pub enum Entry<
+ 'a,
+ K: 'a,
+ V: 'a,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ /// A vacant entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V, A>),
+
+ /// An occupied entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V, A>),
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V: Debug, A: Allocator + Clone> Debug for Entry<'_, K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+ Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+ }
+ }
+}
+
+/// A view into a vacant entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VacantEntry<
+ 'a,
+ K,
+ V,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ pub(super) key: K,
+    /// `None` for an (empty) map without a root
+ pub(super) handle: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+ pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V, A>>,
+
+    /// The BTreeMap will outlive this VacantEntry, so we don't care about drop order for `alloc`.
+ pub(super) alloc: A,
+
+ // Be invariant in `K` and `V`
+ pub(super) _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V, A: Allocator + Clone> Debug for VacantEntry<'_, K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("VacantEntry").field(self.key()).finish()
+ }
+}
+
+/// A view into an occupied entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct OccupiedEntry<
+ 'a,
+ K,
+ V,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
+ pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V, A>>,
+
+    /// The BTreeMap will outlive this OccupiedEntry, so we don't care about drop order for `alloc`.
+ pub(super) alloc: A,
+
+ // Be invariant in `K` and `V`
+ pub(super) _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V: Debug, A: Allocator + Clone> Debug for OccupiedEntry<'_, K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
+ }
+}
+
+/// The error returned by [`try_insert`](BTreeMap::try_insert) when the key already exists.
+///
+/// Contains the occupied entry, and the value that was not inserted.
+#[unstable(feature = "map_try_insert", issue = "82766")]
+pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator + Clone = Global> {
+ /// The entry in the map that was already occupied.
+ pub entry: OccupiedEntry<'a, K, V, A>,
+ /// The value which was not inserted, because the entry was already occupied.
+ pub value: V,
+}
+
+#[unstable(feature = "map_try_insert", issue = "82766")]
+impl<K: Debug + Ord, V: Debug, A: Allocator + Clone> Debug for OccupiedError<'_, K, V, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedError")
+ .field("key", self.entry.key())
+ .field("old_value", self.entry.get())
+ .field("new_value", &self.value)
+ .finish()
+ }
+}
+
+#[unstable(feature = "map_try_insert", issue = "82766")]
+impl<'a, K: Debug + Ord, V: Debug, A: Allocator + Clone> fmt::Display
+ for OccupiedError<'a, K, V, A>
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "failed to insert {:?}, key {:?} already exists with value {:?}",
+ self.value,
+ self.entry.key(),
+ self.entry.get(),
+ )
+ }
+}
+
+impl<'a, K: Ord, V, A: Allocator + Clone> Entry<'a, K, V, A> {
+ /// Ensures a value is in the entry by inserting the default if empty, and returns
+ /// a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn or_insert(self, default: V) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(default),
+ }
+ }
+
+ /// Ensures a value is in the entry by inserting the result of the default function if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
+ /// let s = "hoho".to_string();
+ ///
+ /// map.entry("poneyland").or_insert_with(|| s);
+ ///
+ /// assert_eq!(map["poneyland"], "hoho".to_string());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(default()),
+ }
+ }
+
+ /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
+ /// This method allows for generating key-derived values for insertion by providing the default
+ /// function a reference to the key that was moved during the `.entry(key)` method call.
+ ///
+ /// The reference to the moved key is provided so that cloning or copying the key is
+ /// unnecessary, unlike with `.or_insert_with(|| ... )`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+ ///
+ /// assert_eq!(map["poneyland"], 9);
+ /// ```
+ #[inline]
+ #[stable(feature = "or_insert_with_key", since = "1.50.0")]
+ pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => {
+ let value = default(entry.key());
+ entry.insert(value)
+ }
+ }
+ }
+
+ /// Returns a reference to this entry's key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+ #[stable(feature = "map_entry_keys", since = "1.10.0")]
+ pub fn key(&self) -> &K {
+ match *self {
+ Occupied(ref entry) => entry.key(),
+ Vacant(ref entry) => entry.key(),
+ }
+ }
+
+ /// Provides in-place mutable access to an occupied entry before any
+ /// potential inserts into the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_insert(42);
+ /// assert_eq!(map["poneyland"], 42);
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_insert(42);
+ /// assert_eq!(map["poneyland"], 43);
+ /// ```
+ #[stable(feature = "entry_and_modify", since = "1.26.0")]
+ pub fn and_modify<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match self {
+ Occupied(mut entry) => {
+ f(entry.get_mut());
+ Occupied(entry)
+ }
+ Vacant(entry) => Vacant(entry),
+ }
+ }
+}
+
+impl<'a, K: Ord, V: Default, A: Allocator + Clone> Entry<'a, K, V, A> {
+ #[stable(feature = "entry_or_default", since = "1.28.0")]
+ /// Ensures a value is in the entry by inserting the default value if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
+ /// map.entry("poneyland").or_default();
+ ///
+ /// assert_eq!(map["poneyland"], None);
+ /// ```
+ pub fn or_default(self) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(Default::default()),
+ }
+ }
+}
+
+impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
+ /// Gets a reference to the key that would be used when inserting a value
+ /// through the VacantEntry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+ #[stable(feature = "map_entry_keys", since = "1.10.0")]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ /// Take ownership of the key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// if let Entry::Vacant(v) = map.entry("poneyland") {
+ /// v.into_key();
+ /// }
+ /// ```
+ #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+ pub fn into_key(self) -> K {
+ self.key
+ }
+
+ /// Sets the value of the entry with the `VacantEntry`'s key,
+ /// and returns a mutable reference to it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
+ ///
+ /// if let Entry::Vacant(o) = map.entry("poneyland") {
+ /// o.insert(37);
+ /// }
+ /// assert_eq!(map["poneyland"], 37);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(self, value: V) -> &'a mut V {
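+        // Two cases: the map has no root yet, so create a one-element leaf,
+        // or we hold the leaf edge where the key belongs, so insert there,
+        // growing the tree by one level if a split bubbles up to the root.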
+ let out_ptr = match self.handle {
+ None => {
+ // SAFETY: There is no tree yet so no reference to it exists.
+ let map = unsafe { self.dormant_map.awaken() };
+ let mut root = NodeRef::new_leaf(self.alloc.clone());
+ let val_ptr = root.borrow_mut().push(self.key, value) as *mut V;
+ map.root = Some(root.forget_type());
+ map.length = 1;
+ val_ptr
+ }
+ Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) {
+ (None, val_ptr) => {
+ // SAFETY: We have consumed self.handle.
+ let map = unsafe { self.dormant_map.awaken() };
+ map.length += 1;
+ val_ptr
+ }
+ (Some(ins), val_ptr) => {
+ drop(ins.left);
+ // SAFETY: We have consumed self.handle and dropped the
+ // remaining reference to the tree, ins.left.
+ let map = unsafe { self.dormant_map.awaken() };
+ let root = map.root.as_mut().unwrap(); // same as ins.left
+ root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right);
+ map.length += 1;
+ val_ptr
+ }
+ },
+ };
+        // Now that we have finished growing the tree using borrowed references,
+        // dereference the pointer to a part of it that we picked up along the way.
+ unsafe { &mut *out_ptr }
+ }
+}
+
+impl<'a, K: Ord, V, A: Allocator + Clone> OccupiedEntry<'a, K, V, A> {
+ /// Gets a reference to the key in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+ #[must_use]
+ #[stable(feature = "map_entry_keys", since = "1.10.0")]
+ pub fn key(&self) -> &K {
+ self.handle.reborrow().into_kv().0
+ }
+
+ /// Take ownership of the key and value from the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// // We delete the entry from the map.
+ /// o.remove_entry();
+ /// }
+ ///
+    /// // If we now try to get the value, it will panic:
+ /// // println!("{}", map["poneyland"]);
+ /// ```
+ #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+ pub fn remove_entry(self) -> (K, V) {
+ self.remove_kv()
+ }
+
+ /// Gets a reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.get(), &12);
+ /// }
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get(&self) -> &V {
+ self.handle.reborrow().into_kv().1
+ }
+
+ /// Gets a mutable reference to the value in the entry.
+ ///
+ /// If you need a reference to the `OccupiedEntry` that may outlive the
+ /// destruction of the `Entry` value, see [`into_mut`].
+ ///
+ /// [`into_mut`]: OccupiedEntry::into_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// *o.get_mut() += 10;
+ /// assert_eq!(*o.get(), 22);
+ ///
+ /// // We can use the same Entry multiple times.
+ /// *o.get_mut() += 2;
+ /// }
+ /// assert_eq!(map["poneyland"], 24);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.handle.kv_mut().1
+ }
+
+ /// Converts the entry into a mutable reference to its value.
+ ///
+ /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+ ///
+ /// [`get_mut`]: OccupiedEntry::get_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// *o.into_mut() += 10;
+ /// }
+ /// assert_eq!(map["poneyland"], 22);
+ /// ```
+ #[must_use = "`self` will be dropped if the result is not used"]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn into_mut(self) -> &'a mut V {
+ self.handle.into_val_mut()
+ }
+
+ /// Sets the value of the entry with the `OccupiedEntry`'s key,
+ /// and returns the entry's old value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// assert_eq!(o.insert(15), 12);
+ /// }
+ /// assert_eq!(map["poneyland"], 15);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Takes the value of the entry out of the map, and returns it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.remove(), 12);
+ /// }
+ /// // If we try to get "poneyland"'s value, it'll panic:
+ /// // println!("{}", map["poneyland"]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(self) -> V {
+ self.remove_kv().1
+ }
+
+ // Body of `remove_entry`, probably separate because the name reflects the returned pair.
+ pub(super) fn remove_kv(self) -> (K, V) {
+ let mut emptied_internal_root = false;
+ let (old_kv, _) =
+ self.handle.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone());
+ // SAFETY: we consumed the intermediate root borrow, `self.handle`.
+ let map = unsafe { self.dormant_map.awaken() };
+ map.length -= 1;
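+        // Removal may have left behind an empty internal root node; if so,
+        // shrink the tree by one level.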
+ if emptied_internal_root {
+ let root = map.root.as_mut().unwrap();
+ root.pop_internal_level(self.alloc);
+ }
+ old_kv
+ }
+}
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
new file mode 100644
index 000000000..4c372b1d6
--- /dev/null
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -0,0 +1,2338 @@
+use super::super::testing::crash_test::{CrashTestDummy, Panic};
+use super::super::testing::ord_chaos::{Cyclic3, Governed, Governor};
+use super::super::testing::rng::DeterministicRng;
+use super::Entry::{Occupied, Vacant};
+use super::*;
+use crate::boxed::Box;
+use crate::fmt::Debug;
+use crate::rc::Rc;
+use crate::string::{String, ToString};
+use crate::vec::Vec;
+use std::cmp::Ordering;
+use std::convert::TryFrom;
+use std::iter::{self, FromIterator};
+use std::mem;
+use std::ops::Bound::{self, Excluded, Included, Unbounded};
+use std::ops::RangeBounds;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+
+// Minimum number of elements to insert, to guarantee a tree with 2 levels,
+// i.e., a tree whose root is an internal node at height 1, with edges to leaf nodes.
+// It's not the minimum size: removing an element from such a tree does not always reduce height.
+const MIN_INSERTS_HEIGHT_1: usize = node::CAPACITY + 1;
+
+// Minimum number of elements to insert in ascending order, to guarantee a tree with 3 levels,
+// i.e., a tree whose root is an internal node at height 2, with edges to more internal nodes.
+// It's not the minimum size: removing an element from such a tree does not always reduce height.
+const MIN_INSERTS_HEIGHT_2: usize = 89;
+
+// Gathers all references from a mutable iterator and makes sure Miri notices if
+// using them is dangerous.
+fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator<Item = &'a mut T>) {
+ // Gather all those references.
+ let mut refs: Vec<&mut T> = iter.collect();
+ // Use them all. Twice, to be sure we got all interleavings.
+ for r in refs.iter_mut() {
+ mem::swap(dummy, r);
+ }
+ for r in refs {
+ mem::swap(dummy, r);
+ }
+}
+
+impl<K, V> BTreeMap<K, V> {
+ // Panics if the map (or the code navigating it) is corrupted.
+ fn check_invariants(&self) {
+ if let Some(root) = &self.root {
+ let root_node = root.reborrow();
+
+ // Check the back pointers top-down, before we attempt to rely on
+ // more serious navigation code.
+ assert!(root_node.ascend().is_err());
+ root_node.assert_back_pointers();
+
+ // Check consistency of `length` with what navigation code encounters.
+ assert_eq!(self.length, root_node.calc_length());
+
+ // Lastly, check the invariant causing the least harm.
+ root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
+ } else {
+ assert_eq!(self.length, 0);
+ }
+
+ // Check that `assert_strictly_ascending` will encounter all keys.
+ assert_eq!(self.length, self.keys().count());
+ }
+
+ // Panics if the map is corrupted or if the keys are not in strictly
+ // ascending order, in the current opinion of the `Ord` implementation.
+ // If the `Ord` implementation violates transitivity, this method does not
+ // guarantee that all keys are unique, just that adjacent keys are unique.
+ fn check(&self)
+ where
+ K: Debug + Ord,
+ {
+ self.check_invariants();
+ self.assert_strictly_ascending();
+ }
+
+ // Returns the height of the root, if any.
+ fn height(&self) -> Option<usize> {
+ self.root.as_ref().map(node::Root::height)
+ }
+
+ fn dump_keys(&self) -> String
+ where
+ K: Debug,
+ {
+ if let Some(root) = self.root.as_ref() {
+ root.reborrow().dump_keys()
+ } else {
+ String::from("not yet allocated")
+ }
+ }
+
+ // Panics if the keys are not in strictly ascending order.
+ fn assert_strictly_ascending(&self)
+ where
+ K: Debug + Ord,
+ {
+ let mut keys = self.keys();
+ if let Some(mut previous) = keys.next() {
+ for next in keys {
+ assert!(previous < next, "{:?} >= {:?}", previous, next);
+ previous = next;
+ }
+ }
+ }
+
+ // Transform the tree to minimize wasted space, obtaining fewer nodes that
+ // are mostly filled up to their capacity. The same compact tree could have
+ // been obtained by inserting keys in a shrewd order.
+ fn compact(&mut self)
+ where
+ K: Ord,
+ {
+ let iter = mem::take(self).into_iter();
+ if !iter.is_empty() {
+ self.root.insert(Root::new(*self.alloc)).bulk_push(iter, &mut self.length, *self.alloc);
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+ fn assert_min_len(self, min_len: usize) {
+ assert!(self.len() >= min_len, "node len {} < {}", self.len(), min_len);
+ if let node::ForceResult::Internal(node) = self.force() {
+ for idx in 0..=node.len() {
+ let edge = unsafe { Handle::new_edge(node, idx) };
+ edge.descend().assert_min_len(MIN_LEN);
+ }
+ }
+ }
+}
+
+// Tests our value of MIN_INSERTS_HEIGHT_2. Failure may mean you just need to
+// adapt that value to match a change in node::CAPACITY or the choices made
+// during insertion; otherwise, other test cases may fail or be less useful.
+#[test]
+fn test_levels() {
+ let mut map = BTreeMap::new();
+ map.check();
+ assert_eq!(map.height(), None);
+ assert_eq!(map.len(), 0);
+
+ map.insert(0, ());
+ while map.height() == Some(0) {
+ let last_key = *map.last_key_value().unwrap().0;
+ map.insert(last_key + 1, ());
+ }
+ map.check();
+ // Structure:
+ // - 1 element in internal root node with 2 children
+ // - 6 elements in left leaf child
+ // - 5 elements in right leaf child
+ assert_eq!(map.height(), Some(1));
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1, "{}", map.dump_keys());
+
+ while map.height() == Some(1) {
+ let last_key = *map.last_key_value().unwrap().0;
+ map.insert(last_key + 1, ());
+ }
+ map.check();
+ // Structure:
+ // - 1 element in internal root node with 2 children
+ // - 6 elements in left internal child with 7 grandchildren
+ // - 42 elements in left child's 7 grandchildren with 6 elements each
+ // - 5 elements in right internal child with 6 grandchildren
+ // - 30 elements in right child's 5 first grandchildren with 6 elements each
+ // - 5 elements in right child's last grandchild
+ assert_eq!(map.height(), Some(2));
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2, "{}", map.dump_keys());
+}
+
+// Ensures the testing infrastructure usually notices order violations.
+#[test]
+#[should_panic]
+fn test_check_ord_chaos() {
+ let gov = Governor::new();
+ let map = BTreeMap::from([(Governed(1, &gov), ()), (Governed(2, &gov), ())]);
+ gov.flip();
+ map.check();
+}
+
+// Ensures the testing infrastructure doesn't always mind order violations.
+#[test]
+fn test_check_invariants_ord_chaos() {
+ let gov = Governor::new();
+ let map = BTreeMap::from([(Governed(1, &gov), ()), (Governed(2, &gov), ())]);
+ gov.flip();
+ map.check_invariants();
+}
+
+#[test]
+fn test_basic_large() {
+ let mut map = BTreeMap::new();
+ // Miri is too slow
+ let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 } else { 10000 };
+ let size = size + (size % 2); // round up to even number
+ assert_eq!(map.len(), 0);
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 10 * i), None);
+ assert_eq!(map.len(), i + 1);
+ }
+
+ assert_eq!(map.first_key_value(), Some((&0, &0)));
+ assert_eq!(map.last_key_value(), Some((&(size - 1), &(10 * (size - 1)))));
+ assert_eq!(map.first_entry().unwrap().key(), &0);
+ assert_eq!(map.last_entry().unwrap().key(), &(size - 1));
+
+ for i in 0..size {
+ assert_eq!(map.get(&i).unwrap(), &(i * 10));
+ }
+
+ for i in size..size * 2 {
+ assert_eq!(map.get(&i), None);
+ }
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 100 * i), Some(10 * i));
+ assert_eq!(map.len(), size);
+ }
+
+ for i in 0..size {
+ assert_eq!(map.get(&i).unwrap(), &(i * 100));
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(i * 2)), Some(i * 200));
+ assert_eq!(map.len(), size - i - 1);
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.get(&(2 * i)), None);
+ assert_eq!(map.get(&(2 * i + 1)).unwrap(), &(i * 200 + 100));
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(2 * i)), None);
+ assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
+ assert_eq!(map.len(), size / 2 - i - 1);
+ }
+ map.check();
+}
+
+#[test]
+fn test_basic_small() {
+ let mut map = BTreeMap::new();
+ // Empty, root is absent (None):
+ assert_eq!(map.remove(&1), None);
+ assert_eq!(map.len(), 0);
+ assert_eq!(map.get(&1), None);
+ assert_eq!(map.get_mut(&1), None);
+ assert_eq!(map.first_key_value(), None);
+ assert_eq!(map.last_key_value(), None);
+ assert_eq!(map.keys().count(), 0);
+ assert_eq!(map.values().count(), 0);
+ assert_eq!(map.range(..).next(), None);
+ assert_eq!(map.range(..1).next(), None);
+ assert_eq!(map.range(1..).next(), None);
+ assert_eq!(map.range(1..=1).next(), None);
+ assert_eq!(map.range(1..2).next(), None);
+ assert_eq!(map.height(), None);
+ assert_eq!(map.insert(1, 1), None);
+ assert_eq!(map.height(), Some(0));
+ map.check();
+
+ // 1 key-value pair:
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(&1), Some(&1));
+ assert_eq!(map.get_mut(&1), Some(&mut 1));
+ assert_eq!(map.first_key_value(), Some((&1, &1)));
+ assert_eq!(map.last_key_value(), Some((&1, &1)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&1]);
+ assert_eq!(map.insert(1, 2), Some(1));
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(&1), Some(&2));
+ assert_eq!(map.get_mut(&1), Some(&mut 2));
+ assert_eq!(map.first_key_value(), Some((&1, &2)));
+ assert_eq!(map.last_key_value(), Some((&1, &2)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&2]);
+ assert_eq!(map.insert(2, 4), None);
+ assert_eq!(map.height(), Some(0));
+ map.check();
+
+ // 2 key-value pairs:
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.get(&2), Some(&4));
+ assert_eq!(map.get_mut(&2), Some(&mut 4));
+ assert_eq!(map.first_key_value(), Some((&1, &2)));
+ assert_eq!(map.last_key_value(), Some((&2, &4)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1, &2]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&2, &4]);
+ assert_eq!(map.remove(&1), Some(2));
+ assert_eq!(map.height(), Some(0));
+ map.check();
+
+ // 1 key-value pair:
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(&1), None);
+ assert_eq!(map.get_mut(&1), None);
+ assert_eq!(map.get(&2), Some(&4));
+ assert_eq!(map.get_mut(&2), Some(&mut 4));
+ assert_eq!(map.first_key_value(), Some((&2, &4)));
+ assert_eq!(map.last_key_value(), Some((&2, &4)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&2]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&4]);
+ assert_eq!(map.remove(&2), Some(4));
+ assert_eq!(map.height(), Some(0));
+ map.check();
+
+ // Empty but root is owned (Some(...)):
+ assert_eq!(map.len(), 0);
+ assert_eq!(map.get(&1), None);
+ assert_eq!(map.get_mut(&1), None);
+ assert_eq!(map.first_key_value(), None);
+ assert_eq!(map.last_key_value(), None);
+ assert_eq!(map.keys().count(), 0);
+ assert_eq!(map.values().count(), 0);
+ assert_eq!(map.range(..).next(), None);
+ assert_eq!(map.range(..1).next(), None);
+ assert_eq!(map.range(1..).next(), None);
+ assert_eq!(map.range(1..=1).next(), None);
+ assert_eq!(map.range(1..2).next(), None);
+ assert_eq!(map.remove(&1), None);
+ assert_eq!(map.height(), Some(0));
+ map.check();
+}
+
+#[test]
+fn test_iter() {
+ // Miri is too slow
+ let size = if cfg!(miri) { 200 } else { 10000 };
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+ fn test<T>(size: usize, mut iter: T)
+ where
+ T: Iterator<Item = (usize, usize)>,
+ {
+ for i in 0..size {
+ assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ }
+ test(size, map.iter().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter());
+}
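+
+// An aside on the helper above: it also pins down that these iterators report
+// exact `size_hint`s. A minimal stand-alone sketch of that property, using
+// only the stable public API:
+//
+//     let map = BTreeMap::from([(1, 1), (2, 2)]);
+//     assert_eq!(map.iter().size_hint(), (2, Some(2)));
+//     assert_eq!(map.iter().len(), 2); // Iter also implements ExactSizeIterator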
+
+#[test]
+fn test_iter_rev() {
+ // Miri is too slow
+ let size = if cfg!(miri) { 200 } else { 10000 };
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+ fn test<T>(size: usize, mut iter: T)
+ where
+ T: Iterator<Item = (usize, usize)>,
+ {
+ for i in 0..size {
+ assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
+ assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ }
+ test(size, map.iter().rev().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().rev().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter().rev());
+}
+
+// Specifically tests iter_mut's ability to mutate the value of pairs in-line.
+fn do_test_iter_mut_mutation<T>(size: usize)
+where
+ T: Copy + Debug + Ord + TryFrom<usize>,
+ <T as TryFrom<usize>>::Error: Debug,
+{
+ let zero = T::try_from(0).unwrap();
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (T::try_from(i).unwrap(), zero)));
+
+ // Forward and backward iteration sees enough pairs (also tested elsewhere)
+ assert_eq!(map.iter_mut().count(), size);
+ assert_eq!(map.iter_mut().rev().count(), size);
+
+ // Iterate forwards, trying to mutate to unique values
+ for (i, (k, v)) in map.iter_mut().enumerate() {
+ assert_eq!(*k, T::try_from(i).unwrap());
+ assert_eq!(*v, zero);
+ *v = T::try_from(i + 1).unwrap();
+ }
+
+ // Iterate backwards, checking that mutations succeeded and trying to mutate again
+ for (i, (k, v)) in map.iter_mut().rev().enumerate() {
+ assert_eq!(*k, T::try_from(size - i - 1).unwrap());
+ assert_eq!(*v, T::try_from(size - i).unwrap());
+ *v = T::try_from(2 * size - i).unwrap();
+ }
+
+ // Check that backward mutations succeeded
+ for (i, (k, v)) in map.iter_mut().enumerate() {
+ assert_eq!(*k, T::try_from(i).unwrap());
+ assert_eq!(*v, T::try_from(size + i + 1).unwrap());
+ }
+ map.check();
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
+#[repr(align(32))]
+struct Align32(usize);
+
+impl TryFrom<usize> for Align32 {
+ type Error = ();
+
+ fn try_from(s: usize) -> Result<Align32, ()> {
+ Ok(Align32(s))
+ }
+}
+
+#[test]
+fn test_iter_mut_mutation() {
+ // Check many alignments and trees with roots at various heights.
+ do_test_iter_mut_mutation::<u8>(0);
+ do_test_iter_mut_mutation::<u8>(1);
+ do_test_iter_mut_mutation::<u8>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u8>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u16>(1);
+ do_test_iter_mut_mutation::<u16>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u16>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u32>(1);
+ do_test_iter_mut_mutation::<u32>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u32>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u64>(1);
+ do_test_iter_mut_mutation::<u64>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u64>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u128>(1);
+ do_test_iter_mut_mutation::<u128>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u128>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<Align32>(1);
+ do_test_iter_mut_mutation::<Align32>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<Align32>(MIN_INSERTS_HEIGHT_2);
+}
+
+#[test]
+fn test_values_mut() {
+ let mut a = BTreeMap::from_iter((0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)));
+ test_all_refs(&mut 13, a.values_mut());
+ a.check();
+}
+
+#[test]
+fn test_values_mut_mutation() {
+ let mut a = BTreeMap::new();
+ a.insert(1, String::from("hello"));
+ a.insert(2, String::from("goodbye"));
+
+ for value in a.values_mut() {
+ value.push_str("!");
+ }
+
+ let values = Vec::from_iter(a.values().cloned());
+ assert_eq!(values, [String::from("hello!"), String::from("goodbye!")]);
+ a.check();
+}
+
+#[test]
+fn test_iter_entering_root_twice() {
+ let mut map = BTreeMap::from([(0, 0), (1, 1)]);
+ let mut it = map.iter_mut();
+ let front = it.next().unwrap();
+ let back = it.next_back().unwrap();
+ assert_eq!(front, (&0, &mut 0));
+ assert_eq!(back, (&1, &mut 1));
+ *front.1 = 24;
+ *back.1 = 42;
+ assert_eq!(front, (&0, &mut 24));
+ assert_eq!(back, (&1, &mut 42));
+ assert_eq!(it.next(), None);
+ assert_eq!(it.next_back(), None);
+ map.check();
+}
+
+#[test]
+fn test_iter_descending_to_same_node_twice() {
+ let mut map = BTreeMap::from_iter((0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)));
+ let mut it = map.iter_mut();
+ // Descend into first child.
+ let front = it.next().unwrap();
+ // Descend into first child again, after running through second child.
+ while it.next_back().is_some() {}
+ // Check immutable access.
+ assert_eq!(front, (&0, &mut 0));
+ // Perform mutable access.
+ *front.1 = 42;
+ map.check();
+}
+
+#[test]
+fn test_iter_mixed() {
+ // Miri is too slow
+ let size = if cfg!(miri) { 200 } else { 10000 };
+
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+ fn test<T>(size: usize, mut iter: T)
+ where
+ T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
+ {
+ for i in 0..size / 4 {
+ assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
+ }
+ for i in size / 4..size * 3 / 4 {
+ assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ }
+ test(size, map.iter().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter());
+}
+
+#[test]
+fn test_iter_min_max() {
+ let mut a = BTreeMap::new();
+ assert_eq!(a.iter().min(), None);
+ assert_eq!(a.iter().max(), None);
+ assert_eq!(a.iter_mut().min(), None);
+ assert_eq!(a.iter_mut().max(), None);
+ assert_eq!(a.range(..).min(), None);
+ assert_eq!(a.range(..).max(), None);
+ assert_eq!(a.range_mut(..).min(), None);
+ assert_eq!(a.range_mut(..).max(), None);
+ assert_eq!(a.keys().min(), None);
+ assert_eq!(a.keys().max(), None);
+ assert_eq!(a.values().min(), None);
+ assert_eq!(a.values().max(), None);
+ assert_eq!(a.values_mut().min(), None);
+ assert_eq!(a.values_mut().max(), None);
+ a.insert(1, 42);
+ a.insert(2, 24);
+ assert_eq!(a.iter().min(), Some((&1, &42)));
+ assert_eq!(a.iter().max(), Some((&2, &24)));
+ assert_eq!(a.iter_mut().min(), Some((&1, &mut 42)));
+ assert_eq!(a.iter_mut().max(), Some((&2, &mut 24)));
+ assert_eq!(a.range(..).min(), Some((&1, &42)));
+ assert_eq!(a.range(..).max(), Some((&2, &24)));
+ assert_eq!(a.range_mut(..).min(), Some((&1, &mut 42)));
+ assert_eq!(a.range_mut(..).max(), Some((&2, &mut 24)));
+ assert_eq!(a.keys().min(), Some(&1));
+ assert_eq!(a.keys().max(), Some(&2));
+ assert_eq!(a.values().min(), Some(&24));
+ assert_eq!(a.values().max(), Some(&42));
+ assert_eq!(a.values_mut().min(), Some(&mut 24));
+ assert_eq!(a.values_mut().max(), Some(&mut 42));
+ a.check();
+}
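+
+// For just the extremes there is no need to scan the whole map: `first_key_value`
+// and `last_key_value` (exercised elsewhere in these tests) answer the same
+// question by walking a single path down the tree. A minimal sketch:
+//
+//     let a = BTreeMap::from([(1, 42), (2, 24)]);
+//     assert_eq!(a.first_key_value(), Some((&1, &42)));
+//     assert_eq!(a.last_key_value(), Some((&2, &24)));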
+
+fn range_keys(map: &BTreeMap<i32, i32>, range: impl RangeBounds<i32>) -> Vec<i32> {
+ Vec::from_iter(map.range(range).map(|(&k, &v)| {
+ assert_eq!(k, v);
+ k
+ }))
+}
+
+#[test]
+fn test_range_small() {
+ let size = 4;
+
+ let all = Vec::from_iter(1..=size);
+ let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
+ let map = BTreeMap::from_iter(all.iter().copied().map(|i| (i, i)));
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
+ assert_eq!(range_keys(&map, ..), all);
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
+ assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
+
+ assert_eq!(range_keys(&map, ..3), vec![1, 2]);
+ assert_eq!(range_keys(&map, 3..), vec![3, 4]);
+ assert_eq!(range_keys(&map, 2..=3), vec![2, 3]);
+}
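+
+// The bound combinations above all reduce to the same public contract; as a
+// minimal sketch (assuming only the stable `range` API):
+//
+//     use core::ops::Bound::{Excluded, Included, Unbounded};
+//     let map = BTreeMap::from([(1, 1), (2, 2), (3, 3)]);
+//     // Excluded start, included end: yields keys 2 and 3.
+//     assert!(map.range((Excluded(1), Included(3))).map(|(&k, _)| k).eq(2..=3));
+//     // An Unbounded side places no constraint at all.
+//     assert!(map.range((Unbounded, Excluded(3))).map(|(&k, _)| k).eq(1..=2));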
+
+#[test]
+fn test_range_height_1() {
+ // Tests tree with a root and 2 leaves. We test around the middle of the
+ // keys because one of those is the single key in the root node.
+ let map = BTreeMap::from_iter((0..MIN_INSERTS_HEIGHT_1 as i32).map(|i| (i, i)));
+ let middle = MIN_INSERTS_HEIGHT_1 as i32 / 2;
+ for root in middle - 2..=middle + 2 {
+ assert_eq!(range_keys(&map, (Excluded(root), Excluded(root + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(root), Included(root + 1))), vec![root + 1]);
+ assert_eq!(range_keys(&map, (Included(root), Excluded(root + 1))), vec![root]);
+ assert_eq!(range_keys(&map, (Included(root), Included(root + 1))), vec![root, root + 1]);
+
+ assert_eq!(range_keys(&map, (Excluded(root - 1), Excluded(root))), vec![]);
+ assert_eq!(range_keys(&map, (Included(root - 1), Excluded(root))), vec![root - 1]);
+ assert_eq!(range_keys(&map, (Excluded(root - 1), Included(root))), vec![root]);
+ assert_eq!(range_keys(&map, (Included(root - 1), Included(root))), vec![root - 1, root]);
+ }
+}
+
+#[test]
+fn test_range_large() {
+ let size = 200;
+
+ let all = Vec::from_iter(1..=size);
+ let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
+ let map = BTreeMap::from_iter(all.iter().copied().map(|i| (i, i)));
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
+ assert_eq!(range_keys(&map, ..), all);
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
+ assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
+
+ fn check<'a, L, R>(lhs: L, rhs: R)
+ where
+ L: IntoIterator<Item = (&'a i32, &'a i32)>,
+ R: IntoIterator<Item = (&'a i32, &'a i32)>,
+ {
+ assert_eq!(Vec::from_iter(lhs), Vec::from_iter(rhs));
+ }
+
+ check(map.range(..=100), map.range(..101));
+ check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
+ check(map.range(-1..=2), vec![(&1, &1), (&2, &2)]);
+}
+
+#[test]
+fn test_range_inclusive_max_value() {
+ let max = usize::MAX;
+ let map = BTreeMap::from([(max, 0)]);
+ assert_eq!(Vec::from_iter(map.range(max..=max)), &[(&max, &0)]);
+}
+
+#[test]
+fn test_range_equal_empty_cases() {
+ let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+ assert_eq!(map.range((Included(2), Excluded(2))).next(), None);
+ assert_eq!(map.range((Excluded(2), Included(2))).next(), None);
+}
+
+#[test]
+#[should_panic]
+fn test_range_equal_excluded() {
+ let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+ let _ = map.range((Excluded(2), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_1() {
+ let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+ let _ = map.range((Included(3), Included(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_2() {
+ let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+ let _ = map.range((Included(3), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_3() {
+ let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+ let _ = map.range((Excluded(3), Included(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_4() {
+ let map = BTreeMap::from_iter((0..5).map(|i| (i, i)));
+ let _ = map.range((Excluded(3), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_finding_ill_order_in_map() {
+ let mut map = BTreeMap::new();
+ map.insert(Cyclic3::B, ());
+ // Lacking static_assert, call `range` conditionally, to emphasise that
+ // we cause a different panic than `test_range_backwards_1` does.
+ // A more refined `should_panic` would be welcome.
+ if Cyclic3::C < Cyclic3::A {
+ let _ = map.range(Cyclic3::C..=Cyclic3::A);
+ }
+}
+
+#[test]
+#[should_panic]
+fn test_range_finding_ill_order_in_range_ord() {
+ // Has proper order the first time asked, then flips around.
+ struct EvilTwin(i32);
+
+ impl PartialOrd for EvilTwin {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+ }
+
+ static COMPARES: AtomicUsize = AtomicUsize::new(0);
+ impl Ord for EvilTwin {
+ fn cmp(&self, other: &Self) -> Ordering {
+ let ord = self.0.cmp(&other.0);
+ if COMPARES.fetch_add(1, SeqCst) > 0 { ord.reverse() } else { ord }
+ }
+ }
+
+ impl PartialEq for EvilTwin {
+ fn eq(&self, other: &Self) -> bool {
+ self.0.eq(&other.0)
+ }
+ }
+
+ impl Eq for EvilTwin {}
+
+ #[derive(PartialEq, Eq, PartialOrd, Ord)]
+ struct CompositeKey(i32, EvilTwin);
+
+ impl Borrow<EvilTwin> for CompositeKey {
+ fn borrow(&self) -> &EvilTwin {
+ &self.1
+ }
+ }
+
+ let map = BTreeMap::from_iter((0..12).map(|i| (CompositeKey(i, EvilTwin(i)), ())));
+ let _ = map.range(EvilTwin(5)..=EvilTwin(7));
+}
+
+#[test]
+fn test_range_1000() {
+ // Miri is too slow
+ let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 as u32 } else { 1000 };
+ let map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+ fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
+ let mut kvs = map.range((min, max)).map(|(&k, &v)| (k, v));
+ let mut pairs = (0..size).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ test(&map, size, Included(&0), Excluded(&size));
+ test(&map, size, Unbounded, Excluded(&size));
+ test(&map, size, Included(&0), Included(&(size - 1)));
+ test(&map, size, Unbounded, Included(&(size - 1)));
+ test(&map, size, Included(&0), Unbounded);
+ test(&map, size, Unbounded, Unbounded);
+}
+
+#[test]
+fn test_range_borrowed_key() {
+ let mut map = BTreeMap::new();
+ map.insert("aardvark".to_string(), 1);
+ map.insert("baboon".to_string(), 2);
+ map.insert("coyote".to_string(), 3);
+ map.insert("dingo".to_string(), 4);
+ // NOTE: would like to use simply "b".."d" here, but that would infer the
+ // borrowed key type as `&str`, and `String` implements `Borrow<str>`, not
+ // `Borrow<&str>`; hence the explicit `range::<str, _>` with tuple bounds.
+ let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
+ assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
+ assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
+ assert_eq!(iter.next(), None);
+}
+
+#[test]
+fn test_range() {
+ let size = 200;
+ // Miri is too slow
+ let step = if cfg!(miri) { 66 } else { 1 };
+ let map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+ for i in (0..size).step_by(step) {
+ for j in (i..size).step_by(step) {
+ let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
+ let mut pairs = (i..=j).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ }
+}
+
+#[test]
+fn test_range_mut() {
+ let size = 200;
+ // Miri is too slow
+ let step = if cfg!(miri) { 66 } else { 1 };
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (i, i)));
+
+ for i in (0..size).step_by(step) {
+ for j in (i..size).step_by(step) {
+ let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
+ let mut pairs = (i..=j).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ }
+ map.check();
+}
+
+#[should_panic(expected = "range start is greater than range end in BTreeMap")]
+#[test]
+fn test_range_panic_1() {
+ let mut map = BTreeMap::new();
+ map.insert(3, "a");
+ map.insert(5, "b");
+ map.insert(8, "c");
+
+ let _invalid_range = map.range((Included(&8), Included(&3)));
+}
+
+#[should_panic(expected = "range start and end are equal and excluded in BTreeMap")]
+#[test]
+fn test_range_panic_2() {
+ let mut map = BTreeMap::new();
+ map.insert(3, "a");
+ map.insert(5, "b");
+ map.insert(8, "c");
+
+ let _invalid_range = map.range((Excluded(&5), Excluded(&5)));
+}
+
+#[should_panic(expected = "range start and end are equal and excluded in BTreeMap")]
+#[test]
+fn test_range_panic_3() {
+ let mut map: BTreeMap<i32, ()> = BTreeMap::new();
+ map.insert(3, ());
+ map.insert(5, ());
+ map.insert(8, ());
+
+ let _invalid_range = map.range((Excluded(&5), Excluded(&5)));
+}
+
+#[test]
+fn test_retain() {
+ let mut map = BTreeMap::from_iter((0..100).map(|x| (x, x * 10)));
+
+ map.retain(|&k, _| k % 2 == 0);
+ assert_eq!(map.len(), 50);
+ assert_eq!(map[&2], 20);
+ assert_eq!(map[&4], 40);
+ assert_eq!(map[&6], 60);
+}
+
+mod test_drain_filter {
+ use super::*;
+
+ #[test]
+ fn empty() {
+ let mut map: BTreeMap<i32, i32> = BTreeMap::new();
+ map.drain_filter(|_, _| unreachable!("there's nothing to decide on"));
+ assert_eq!(map.height(), None);
+ map.check();
+ }
+
+ // Explicitly consumes the iterator, where most test cases drop it instantly.
+ #[test]
+ fn consumed_keeping_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ assert!(map.drain_filter(|_, _| false).eq(iter::empty()));
+ map.check();
+ }
+
+ // Explicitly consumes the iterator, where most test cases drop it instantly.
+ #[test]
+ fn consumed_removing_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ assert!(map.drain_filter(|_, _| true).eq(pairs));
+ assert!(map.is_empty());
+ map.check();
+ }
+
+ // Explicitly consumes the iterator and modifies values through it.
+ #[test]
+ fn mutating_and_keeping() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ assert!(
+ map.drain_filter(|_, v| {
+ *v += 6;
+ false
+ })
+ .eq(iter::empty())
+ );
+ assert!(map.keys().copied().eq(0..3));
+ assert!(map.values().copied().eq(6..9));
+ map.check();
+ }
+
+ // Explicitly consumes the iterator and modifies values through it.
+ #[test]
+ fn mutating_and_removing() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ assert!(
+ map.drain_filter(|_, v| {
+ *v += 6;
+ true
+ })
+ .eq((0..3).map(|i| (i, i + 6)))
+ );
+ assert!(map.is_empty());
+ map.check();
+ }
+
+ #[test]
+ fn underfull_keeping_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ map.drain_filter(|_, _| false);
+ assert!(map.keys().copied().eq(0..3));
+ map.check();
+ }
+
+ #[test]
+ fn underfull_removing_one() {
+ let pairs = (0..3).map(|i| (i, i));
+ for doomed in 0..3 {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), 2);
+ map.check();
+ }
+ }
+
+ #[test]
+ fn underfull_keeping_one() {
+ let pairs = (0..3).map(|i| (i, i));
+ for sacred in 0..3 {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ map.check();
+ }
+ }
+
+ #[test]
+ fn underfull_removing_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ map.check();
+ }
+
+ #[test]
+ fn height_0_keeping_all() {
+ let pairs = (0..node::CAPACITY).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ map.drain_filter(|_, _| false);
+ assert!(map.keys().copied().eq(0..node::CAPACITY));
+ map.check();
+ }
+
+ #[test]
+ fn height_0_removing_one() {
+ let pairs = (0..node::CAPACITY).map(|i| (i, i));
+ for doomed in 0..node::CAPACITY {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), node::CAPACITY - 1);
+ map.check();
+ }
+ }
+
+ #[test]
+ fn height_0_keeping_one() {
+ let pairs = (0..node::CAPACITY).map(|i| (i, i));
+ for sacred in 0..node::CAPACITY {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ map.check();
+ }
+ }
+
+ #[test]
+ fn height_0_removing_all() {
+ let pairs = (0..node::CAPACITY).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ map.check();
+ }
+
+ #[test]
+ fn height_0_keeping_half() {
+ let mut map = BTreeMap::from_iter((0..16).map(|i| (i, i)));
+ assert_eq!(map.drain_filter(|i, _| *i % 2 == 0).count(), 8);
+ assert_eq!(map.len(), 8);
+ map.check();
+ }
+
+ #[test]
+ fn height_1_removing_all() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ map.check();
+ }
+
+ #[test]
+ fn height_1_removing_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
+ for doomed in 0..MIN_INSERTS_HEIGHT_1 {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1 - 1);
+ map.check();
+ }
+ }
+
+ #[test]
+ fn height_1_keeping_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
+ for sacred in 0..MIN_INSERTS_HEIGHT_1 {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ map.check();
+ }
+ }
+
+ #[test]
+ fn height_2_removing_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ for doomed in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ map.check();
+ }
+ }
+
+ #[test]
+ fn height_2_keeping_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ for sacred in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
+ let mut map = BTreeMap::from_iter(pairs.clone());
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ map.check();
+ }
+ }
+
+ #[test]
+ fn height_2_removing_all() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ let mut map = BTreeMap::from_iter(pairs);
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ map.check();
+ }
+
+ #[test]
+ fn drop_panic_leak() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let mut map = BTreeMap::new();
+ map.insert(a.spawn(Panic::Never), ());
+ map.insert(b.spawn(Panic::InDrop), ());
+ map.insert(c.spawn(Panic::Never), ());
+
+ catch_unwind(move || drop(map.drain_filter(|dummy, _| dummy.query(true)))).unwrap_err();
+
+ assert_eq!(a.queried(), 1);
+ assert_eq!(b.queried(), 1);
+ assert_eq!(c.queried(), 0);
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 1);
+ assert_eq!(c.dropped(), 1);
+ }
+
+ #[test]
+ fn pred_panic_leak() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let mut map = BTreeMap::new();
+ map.insert(a.spawn(Panic::Never), ());
+ map.insert(b.spawn(Panic::InQuery), ());
+ map.insert(c.spawn(Panic::InQuery), ());
+
+ catch_unwind(AssertUnwindSafe(|| drop(map.drain_filter(|dummy, _| dummy.query(true)))))
+ .unwrap_err();
+
+ assert_eq!(a.queried(), 1);
+ assert_eq!(b.queried(), 1);
+ assert_eq!(c.queried(), 0);
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 0);
+ assert_eq!(c.dropped(), 0);
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.first_entry().unwrap().key().id(), 1);
+ assert_eq!(map.last_entry().unwrap().key().id(), 2);
+ map.check();
+ }
+
+ // Same as above, but attempt to use the iterator again after the panic in the predicate
+ #[test]
+ fn pred_panic_reuse() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let mut map = BTreeMap::new();
+ map.insert(a.spawn(Panic::Never), ());
+ map.insert(b.spawn(Panic::InQuery), ());
+ map.insert(c.spawn(Panic::InQuery), ());
+
+ {
+ let mut it = map.drain_filter(|dummy, _| dummy.query(true));
+ catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err();
+ // Iterator behaviour after a panic is explicitly unspecified,
+ // so this is just the current implementation:
+ let result = catch_unwind(AssertUnwindSafe(|| it.next()));
+ assert!(matches!(result, Ok(None)));
+ }
+
+ assert_eq!(a.queried(), 1);
+ assert_eq!(b.queried(), 1);
+ assert_eq!(c.queried(), 0);
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 0);
+ assert_eq!(c.dropped(), 0);
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.first_entry().unwrap().key().id(), 1);
+ assert_eq!(map.last_entry().unwrap().key().id(), 2);
+ map.check();
+ }
+}
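+
+// The cases above all build on one observable contract; a minimal sketch
+// (`drain_filter` is unstable in this snapshot and was later renamed to
+// `extract_if`):
+//
+//     let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+//     // Entries for which the predicate returns true are removed and yielded;
+//     // the rest stay in the map.
+//     let evens: Vec<_> = map.drain_filter(|k, _| k % 2 == 0).collect();
+//     assert_eq!(evens, [(2, 'b')]);
+//     assert!(map.keys().eq(&[1, 3]));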
+
+#[test]
+fn test_borrow() {
+ // make sure these compile -- using the Borrow trait
+ {
+ let mut map = BTreeMap::new();
+ map.insert("0".to_string(), 1);
+ assert_eq!(map["0"], 1);
+ }
+
+ {
+ let mut map = BTreeMap::new();
+ map.insert(Box::new(0), 1);
+ assert_eq!(map[&0], 1);
+ }
+
+ {
+ let mut map = BTreeMap::new();
+ map.insert(Box::new([0, 1]) as Box<[i32]>, 1);
+ assert_eq!(map[&[0, 1][..]], 1);
+ }
+
+ {
+ let mut map = BTreeMap::new();
+ map.insert(Rc::new(0), 1);
+ assert_eq!(map[&0], 1);
+ }
+
+ #[allow(dead_code)]
+ fn get<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+ let _ = v.get(t);
+ }
+
+ #[allow(dead_code)]
+ fn get_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+ let _ = v.get_mut(t);
+ }
+
+ #[allow(dead_code)]
+ fn get_key_value<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+ let _ = v.get_key_value(t);
+ }
+
+ #[allow(dead_code)]
+ fn contains_key<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: &T) {
+ let _ = v.contains_key(t);
+ }
+
+ #[allow(dead_code)]
+ fn range<T: Ord>(v: &BTreeMap<Box<T>, ()>, t: T) {
+ let _ = v.range(t..);
+ }
+
+ #[allow(dead_code)]
+ fn range_mut<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: T) {
+ let _ = v.range_mut(t..);
+ }
+
+ #[allow(dead_code)]
+ fn remove<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+ v.remove(t);
+ }
+
+ #[allow(dead_code)]
+ fn remove_entry<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+ v.remove_entry(t);
+ }
+
+ #[allow(dead_code)]
+ fn split_off<T: Ord>(v: &mut BTreeMap<Box<T>, ()>, t: &T) {
+ v.split_off(t);
+ }
+}
+
+#[test]
+fn test_entry() {
+ let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+ let mut map = BTreeMap::from(xs);
+
+ // Existing key (insert)
+ match map.entry(1) {
+ Vacant(_) => unreachable!(),
+ Occupied(mut view) => {
+ assert_eq!(view.get(), &10);
+ assert_eq!(view.insert(100), 10);
+ }
+ }
+ assert_eq!(map.get(&1).unwrap(), &100);
+ assert_eq!(map.len(), 6);
+
+ // Existing key (update)
+ match map.entry(2) {
+ Vacant(_) => unreachable!(),
+ Occupied(mut view) => {
+ let v = view.get_mut();
+ *v *= 10;
+ }
+ }
+ assert_eq!(map.get(&2).unwrap(), &200);
+ assert_eq!(map.len(), 6);
+ map.check();
+
+ // Existing key (take)
+ match map.entry(3) {
+ Vacant(_) => unreachable!(),
+ Occupied(view) => {
+ assert_eq!(view.remove(), 30);
+ }
+ }
+ assert_eq!(map.get(&3), None);
+ assert_eq!(map.len(), 5);
+ map.check();
+
+ // Nonexistent key (insert)
+ match map.entry(10) {
+ Occupied(_) => unreachable!(),
+ Vacant(view) => {
+ assert_eq!(*view.insert(1000), 1000);
+ }
+ }
+ assert_eq!(map.get(&10).unwrap(), &1000);
+ assert_eq!(map.len(), 6);
+ map.check();
+}
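+
+// The match-based code above spells out what the entry API combinators
+// abbreviate; a minimal sketch using the stable shorthands:
+//
+//     let mut counts: BTreeMap<&str, u32> = BTreeMap::new();
+//     for word in ["a", "b", "a"] {
+//         *counts.entry(word).or_insert(0) += 1; // vacant: insert 0, then bump
+//     }
+//     assert_eq!(counts["a"], 2);
+//     counts.entry("b").and_modify(|c| *c *= 10); // occupied: update in place
+//     assert_eq!(counts["b"], 10);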
+
+#[test]
+fn test_extend_ref() {
+ let mut a = BTreeMap::new();
+ a.insert(1, "one");
+ let mut b = BTreeMap::new();
+ b.insert(2, "two");
+ b.insert(3, "three");
+
+ a.extend(&b);
+
+ assert_eq!(a.len(), 3);
+ assert_eq!(a[&1], "one");
+ assert_eq!(a[&2], "two");
+ assert_eq!(a[&3], "three");
+ a.check();
+}
+
+#[test]
+fn test_zst() {
+ let mut m = BTreeMap::new();
+ assert_eq!(m.len(), 0);
+
+ assert_eq!(m.insert((), ()), None);
+ assert_eq!(m.len(), 1);
+
+ assert_eq!(m.insert((), ()), Some(()));
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.iter().count(), 1);
+
+ m.clear();
+ assert_eq!(m.len(), 0);
+
+ for _ in 0..100 {
+ m.insert((), ());
+ }
+
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.iter().count(), 1);
+ m.check();
+}
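+
+// Why the length stays at 1: `()` keys all compare `Equal`, so every insert
+// after the first is an update of the one existing entry. In sketch form:
+//
+//     let mut m = BTreeMap::new();
+//     assert_eq!(m.insert((), 1), None);
+//     assert_eq!(m.insert((), 2), Some(1)); // replaces the value, no growth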
+
+// This test's only purpose is to ensure that zero-sized keys with nonsensical orderings
+// do not cause segfaults when used with zero-sized values. All other map behavior is
+// undefined.
+#[test]
+fn test_bad_zst() {
+ #[derive(Clone, Copy, Debug)]
+ struct Bad;
+
+ impl PartialEq for Bad {
+ fn eq(&self, _: &Self) -> bool {
+ false
+ }
+ }
+
+ impl Eq for Bad {}
+
+ impl PartialOrd for Bad {
+ fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
+ Some(Ordering::Less)
+ }
+ }
+
+ impl Ord for Bad {
+ fn cmp(&self, _: &Self) -> Ordering {
+ Ordering::Less
+ }
+ }
+
+ let mut m = BTreeMap::new();
+
+ for _ in 0..100 {
+ m.insert(Bad, Bad);
+ }
+ m.check();
+}
+
+#[test]
+fn test_clear() {
+ let mut map = BTreeMap::new();
+ for &len in &[MIN_INSERTS_HEIGHT_1, MIN_INSERTS_HEIGHT_2, 0, node::CAPACITY] {
+ for i in 0..len {
+ map.insert(i, ());
+ }
+ assert_eq!(map.len(), len);
+ map.clear();
+ map.check();
+ assert_eq!(map.height(), None);
+ }
+}
+
+#[test]
+fn test_clear_drop_panic_leak() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+
+ let mut map = BTreeMap::new();
+ map.insert(a.spawn(Panic::Never), ());
+ map.insert(b.spawn(Panic::InDrop), ());
+ map.insert(c.spawn(Panic::Never), ());
+
+ catch_unwind(AssertUnwindSafe(|| map.clear())).unwrap_err();
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 1);
+ assert_eq!(c.dropped(), 1);
+ assert_eq!(map.len(), 0);
+
+ drop(map);
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 1);
+ assert_eq!(c.dropped(), 1);
+}
+
+#[test]
+fn test_clone() {
+ let mut map = BTreeMap::new();
+ let size = MIN_INSERTS_HEIGHT_1;
+ assert_eq!(map.len(), 0);
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 10 * i), None);
+ assert_eq!(map.len(), i + 1);
+ map.check();
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 100 * i), Some(10 * i));
+ assert_eq!(map.len(), size);
+ map.check();
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(i * 2)), Some(i * 200));
+ assert_eq!(map.len(), size - i - 1);
+ map.check();
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(2 * i)), None);
+ assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
+ assert_eq!(map.len(), size / 2 - i - 1);
+ map.check();
+ assert_eq!(map, map.clone());
+ }
+
+ // Test a tree with 2 semi-full levels and a tree with 3 levels.
+ map = BTreeMap::from_iter((1..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)));
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ assert_eq!(map, map.clone());
+ map.insert(0, 0);
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2);
+ assert_eq!(map, map.clone());
+ map.check();
+}
+
+fn test_clone_panic_leak(size: usize) {
+ for i in 0..size {
+ let dummies = Vec::from_iter((0..size).map(|id| CrashTestDummy::new(id)));
+ let map = BTreeMap::from_iter(dummies.iter().map(|dummy| {
+ let panic = if dummy.id == i { Panic::InClone } else { Panic::Never };
+ (dummy.spawn(panic), ())
+ }));
+
+ catch_unwind(|| map.clone()).unwrap_err();
+ for d in &dummies {
+ assert_eq!(d.cloned(), if d.id <= i { 1 } else { 0 }, "id={}/{}", d.id, i);
+ assert_eq!(d.dropped(), if d.id < i { 1 } else { 0 }, "id={}/{}", d.id, i);
+ }
+ assert_eq!(map.len(), size);
+
+ drop(map);
+ for d in &dummies {
+ assert_eq!(d.cloned(), if d.id <= i { 1 } else { 0 }, "id={}/{}", d.id, i);
+ assert_eq!(d.dropped(), if d.id < i { 2 } else { 1 }, "id={}/{}", d.id, i);
+ }
+ }
+}
+
+#[test]
+fn test_clone_panic_leak_height_0() {
+ test_clone_panic_leak(3)
+}
+
+#[test]
+fn test_clone_panic_leak_height_1() {
+ test_clone_panic_leak(MIN_INSERTS_HEIGHT_1)
+}
+
+#[test]
+fn test_clone_from() {
+ let mut map1 = BTreeMap::new();
+ let max_size = MIN_INSERTS_HEIGHT_1;
+
+ // Range to max_size inclusive, because i is the size of map1 being tested.
+ for i in 0..=max_size {
+ let mut map2 = BTreeMap::new();
+ for j in 0..i {
+ let mut map1_copy = map2.clone();
+ map1_copy.clone_from(&map1); // small cloned from large
+ assert_eq!(map1_copy, map1);
+ let mut map2_copy = map1.clone();
+ map2_copy.clone_from(&map2); // large cloned from small
+ assert_eq!(map2_copy, map2);
+ map2.insert(100 * j + 1, 2 * j + 1);
+ }
+ map2.clone_from(&map1); // same length
+ map2.check();
+ assert_eq!(map2, map1);
+ map1.insert(i, 10 * i);
+ map1.check();
+ }
+}
+
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> {
+ v
+ }
+ fn map_val<'new>(v: BTreeMap<(), &'static str>) -> BTreeMap<(), &'new str> {
+ v
+ }
+
+ fn iter_key<'a, 'new>(v: Iter<'a, &'static str, ()>) -> Iter<'a, &'new str, ()> {
+ v
+ }
+ fn iter_val<'a, 'new>(v: Iter<'a, (), &'static str>) -> Iter<'a, (), &'new str> {
+ v
+ }
+
+ fn into_iter_key<'new>(v: IntoIter<&'static str, ()>) -> IntoIter<&'new str, ()> {
+ v
+ }
+ fn into_iter_val<'new>(v: IntoIter<(), &'static str>) -> IntoIter<(), &'new str> {
+ v
+ }
+
+ fn into_keys_key<'new>(v: IntoKeys<&'static str, ()>) -> IntoKeys<&'new str, ()> {
+ v
+ }
+ fn into_keys_val<'new>(v: IntoKeys<(), &'static str>) -> IntoKeys<(), &'new str> {
+ v
+ }
+
+ fn into_values_key<'new>(v: IntoValues<&'static str, ()>) -> IntoValues<&'new str, ()> {
+ v
+ }
+ fn into_values_val<'new>(v: IntoValues<(), &'static str>) -> IntoValues<(), &'new str> {
+ v
+ }
+
+ fn range_key<'a, 'new>(v: Range<'a, &'static str, ()>) -> Range<'a, &'new str, ()> {
+ v
+ }
+ fn range_val<'a, 'new>(v: Range<'a, (), &'static str>) -> Range<'a, (), &'new str> {
+ v
+ }
+
+ fn keys_key<'a, 'new>(v: Keys<'a, &'static str, ()>) -> Keys<'a, &'new str, ()> {
+ v
+ }
+ fn keys_val<'a, 'new>(v: Keys<'a, (), &'static str>) -> Keys<'a, (), &'new str> {
+ v
+ }
+
+ fn values_key<'a, 'new>(v: Values<'a, &'static str, ()>) -> Values<'a, &'new str, ()> {
+ v
+ }
+ fn values_val<'a, 'new>(v: Values<'a, (), &'static str>) -> Values<'a, (), &'new str> {
+ v
+ }
+}
+
+#[allow(dead_code)]
+fn assert_sync() {
+ fn map<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+ v
+ }
+
+ fn into_iter<T: Sync>(v: BTreeMap<T, T>) -> impl Sync {
+ v.into_iter()
+ }
+
+ fn into_keys<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync {
+ v.into_keys()
+ }
+
+ fn into_values<T: Sync + Ord>(v: BTreeMap<T, T>) -> impl Sync {
+ v.into_values()
+ }
+
+ fn drain_filter<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ v.drain_filter(|_, _| false)
+ }
+
+ fn iter<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+ v.iter()
+ }
+
+ fn iter_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ v.iter_mut()
+ }
+
+ fn keys<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+ v.keys()
+ }
+
+ fn values<T: Sync>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+ v.values()
+ }
+
+ fn values_mut<T: Sync>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ v.values_mut()
+ }
+
+ fn range<T: Sync + Ord>(v: &BTreeMap<T, T>) -> impl Sync + '_ {
+ v.range(..)
+ }
+
+ fn range_mut<T: Sync + Ord>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ v.range_mut(..)
+ }
+
+ fn entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ v.entry(Default::default())
+ }
+
+ fn occupied_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ match v.entry(Default::default()) {
+ Occupied(entry) => entry,
+ _ => unreachable!(),
+ }
+ }
+
+ fn vacant_entry<T: Sync + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Sync + '_ {
+ match v.entry(Default::default()) {
+ Vacant(entry) => entry,
+ _ => unreachable!(),
+ }
+ }
+}
+
+#[allow(dead_code)]
+fn assert_send() {
+ fn map<T: Send>(v: BTreeMap<T, T>) -> impl Send {
+ v
+ }
+
+ fn into_iter<T: Send>(v: BTreeMap<T, T>) -> impl Send {
+ v.into_iter()
+ }
+
+ fn into_keys<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send {
+ v.into_keys()
+ }
+
+ fn into_values<T: Send + Ord>(v: BTreeMap<T, T>) -> impl Send {
+ v.into_values()
+ }
+
+ fn drain_filter<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ v.drain_filter(|_, _| false)
+ }
+
+ fn iter<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+ v.iter()
+ }
+
+ fn iter_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ v.iter_mut()
+ }
+
+ fn keys<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+ v.keys()
+ }
+
+ fn values<T: Send + Sync>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+ v.values()
+ }
+
+ fn values_mut<T: Send>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ v.values_mut()
+ }
+
+ fn range<T: Send + Sync + Ord>(v: &BTreeMap<T, T>) -> impl Send + '_ {
+ v.range(..)
+ }
+
+ fn range_mut<T: Send + Ord>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ v.range_mut(..)
+ }
+
+ fn entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ v.entry(Default::default())
+ }
+
+ fn occupied_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ match v.entry(Default::default()) {
+ Occupied(entry) => entry,
+ _ => unreachable!(),
+ }
+ }
+
+ fn vacant_entry<T: Send + Ord + Default>(v: &mut BTreeMap<T, T>) -> impl Send + '_ {
+ match v.entry(Default::default()) {
+ Vacant(entry) => entry,
+ _ => unreachable!(),
+ }
+ }
+}
+
+#[test]
+fn test_ord_absence() {
+ fn map<K>(mut map: BTreeMap<K, ()>) {
+ let _ = map.is_empty();
+ let _ = map.len();
+ map.clear();
+ let _ = map.iter();
+ let _ = map.iter_mut();
+ let _ = map.keys();
+ let _ = map.values();
+ let _ = map.values_mut();
+ if true {
+ let _ = map.into_values();
+ } else if true {
+ let _ = map.into_iter();
+ } else {
+ let _ = map.into_keys();
+ }
+ }
+
+ fn map_debug<K: Debug>(mut map: BTreeMap<K, ()>) {
+ format!("{map:?}");
+ format!("{:?}", map.iter());
+ format!("{:?}", map.iter_mut());
+ format!("{:?}", map.keys());
+ format!("{:?}", map.values());
+ format!("{:?}", map.values_mut());
+ if true {
+ format!("{:?}", map.into_iter());
+ } else if true {
+ format!("{:?}", map.into_keys());
+ } else {
+ format!("{:?}", map.into_values());
+ }
+ }
+
+ fn map_clone<K: Clone>(mut map: BTreeMap<K, ()>) {
+ map.clone_from(&map.clone());
+ }
+
+ #[derive(Debug, Clone)]
+ struct NonOrd;
+ map(BTreeMap::<NonOrd, _>::new());
+ map_debug(BTreeMap::<NonOrd, _>::new());
+ map_clone(BTreeMap::<NonOrd, _>::default());
+}
+
+#[test]
+fn test_occupied_entry_key() {
+ let mut a = BTreeMap::new();
+ let key = "hello there";
+ let value = "value goes here";
+ assert_eq!(a.height(), None);
+ a.insert(key, value);
+ assert_eq!(a.len(), 1);
+ assert_eq!(a[key], value);
+
+ match a.entry(key) {
+ Vacant(_) => panic!(),
+ Occupied(e) => assert_eq!(key, *e.key()),
+ }
+ assert_eq!(a.len(), 1);
+ assert_eq!(a[key], value);
+ a.check();
+}
+
+#[test]
+fn test_vacant_entry_key() {
+ let mut a = BTreeMap::new();
+ let key = "hello there";
+ let value = "value goes here";
+
+ assert_eq!(a.height(), None);
+ match a.entry(key) {
+ Occupied(_) => unreachable!(),
+ Vacant(e) => {
+ assert_eq!(key, *e.key());
+ e.insert(value);
+ }
+ }
+ assert_eq!(a.len(), 1);
+ assert_eq!(a[key], value);
+ a.check();
+}
+
+#[test]
+fn test_vacant_entry_no_insert() {
+ let mut a = BTreeMap::<&str, ()>::new();
+ let key = "hello there";
+
+ // Non-allocated
+ assert_eq!(a.height(), None);
+ match a.entry(key) {
+ Occupied(_) => unreachable!(),
+ Vacant(e) => assert_eq!(key, *e.key()),
+ }
+ // Ensures the tree has no root.
+ assert_eq!(a.height(), None);
+ a.check();
+
+ // Allocated but still empty
+ a.insert(key, ());
+ a.remove(&key);
+ assert_eq!(a.height(), Some(0));
+ assert!(a.is_empty());
+ match a.entry(key) {
+ Occupied(_) => unreachable!(),
+ Vacant(e) => assert_eq!(key, *e.key()),
+ }
+ // Ensures the allocated root is not changed.
+ assert_eq!(a.height(), Some(0));
+ assert!(a.is_empty());
+ a.check();
+}
+
+#[test]
+fn test_first_last_entry() {
+ let mut a = BTreeMap::new();
+ assert!(a.first_entry().is_none());
+ assert!(a.last_entry().is_none());
+ a.insert(1, 42);
+ assert_eq!(a.first_entry().unwrap().key(), &1);
+ assert_eq!(a.last_entry().unwrap().key(), &1);
+ a.insert(2, 24);
+ assert_eq!(a.first_entry().unwrap().key(), &1);
+ assert_eq!(a.last_entry().unwrap().key(), &2);
+ a.insert(0, 6);
+ assert_eq!(a.first_entry().unwrap().key(), &0);
+ assert_eq!(a.last_entry().unwrap().key(), &2);
+ let (k1, v1) = a.first_entry().unwrap().remove_entry();
+ assert_eq!(k1, 0);
+ assert_eq!(v1, 6);
+ let (k2, v2) = a.last_entry().unwrap().remove_entry();
+ assert_eq!(k2, 2);
+ assert_eq!(v2, 24);
+ assert_eq!(a.first_entry().unwrap().key(), &1);
+ assert_eq!(a.last_entry().unwrap().key(), &1);
+ a.check();
+}
+
+#[test]
+fn test_pop_first_last() {
+ let mut map = BTreeMap::new();
+ assert_eq!(map.pop_first(), None);
+ assert_eq!(map.pop_last(), None);
+
+ map.insert(1, 10);
+ map.insert(2, 20);
+ map.insert(3, 30);
+ map.insert(4, 40);
+
+ assert_eq!(map.len(), 4);
+
+ let (key, val) = map.pop_first().unwrap();
+ assert_eq!(key, 1);
+ assert_eq!(val, 10);
+ assert_eq!(map.len(), 3);
+
+ let (key, val) = map.pop_first().unwrap();
+ assert_eq!(key, 2);
+ assert_eq!(val, 20);
+ assert_eq!(map.len(), 2);
+ let (key, val) = map.pop_last().unwrap();
+ assert_eq!(key, 4);
+ assert_eq!(val, 40);
+ assert_eq!(map.len(), 1);
+
+ map.insert(5, 50);
+ map.insert(6, 60);
+ assert_eq!(map.len(), 3);
+
+ let (key, val) = map.pop_first().unwrap();
+ assert_eq!(key, 3);
+ assert_eq!(val, 30);
+ assert_eq!(map.len(), 2);
+
+ let (key, val) = map.pop_last().unwrap();
+ assert_eq!(key, 6);
+ assert_eq!(val, 60);
+ assert_eq!(map.len(), 1);
+
+ let (key, val) = map.pop_last().unwrap();
+ assert_eq!(key, 5);
+ assert_eq!(val, 50);
+ assert_eq!(map.len(), 0);
+
+ assert_eq!(map.pop_first(), None);
+ assert_eq!(map.pop_last(), None);
+
+ map.insert(7, 70);
+ map.insert(8, 80);
+
+ let (key, val) = map.pop_last().unwrap();
+ assert_eq!(key, 8);
+ assert_eq!(val, 80);
+ assert_eq!(map.len(), 1);
+
+ let (key, val) = map.pop_last().unwrap();
+ assert_eq!(key, 7);
+ assert_eq!(val, 70);
+ assert_eq!(map.len(), 0);
+
+ assert_eq!(map.pop_first(), None);
+ assert_eq!(map.pop_last(), None);
+}
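+
+// A usage sketch of the pattern these assertions pin down (`pop_first` and
+// `pop_last` are unstable in this snapshot): draining a map in ascending key
+// order without consuming it up front:
+//
+//     let mut map = BTreeMap::from([(2, 'b'), (1, 'a')]);
+//     while let Some((_k, _v)) = map.pop_first() {
+//         // visits (1, 'a'), then (2, 'b'); the map shrinks as we go
+//     }
+//     assert!(map.is_empty());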
+
+#[test]
+fn test_get_key_value() {
+ let mut map = BTreeMap::new();
+
+ assert!(map.is_empty());
+ assert_eq!(map.get_key_value(&1), None);
+ assert_eq!(map.get_key_value(&2), None);
+
+ map.insert(1, 10);
+ map.insert(2, 20);
+ map.insert(3, 30);
+
+ assert_eq!(map.len(), 3);
+ assert_eq!(map.get_key_value(&1), Some((&1, &10)));
+ assert_eq!(map.get_key_value(&3), Some((&3, &30)));
+ assert_eq!(map.get_key_value(&4), None);
+
+ map.remove(&3);
+
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.get_key_value(&3), None);
+ assert_eq!(map.get_key_value(&2), Some((&2, &20)));
+}
+
+#[test]
+fn test_insert_into_full_height_0() {
+ let size = node::CAPACITY;
+ for pos in 0..=size {
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (i * 2 + 1, ())));
+ assert!(map.insert(pos * 2, ()).is_none());
+ map.check();
+ }
+}
+
+#[test]
+fn test_insert_into_full_height_1() {
+ let size = node::CAPACITY + 1 + node::CAPACITY;
+ for pos in 0..=size {
+ let mut map = BTreeMap::from_iter((0..size).map(|i| (i * 2 + 1, ())));
+ map.compact();
+ let root_node = map.root.as_ref().unwrap().reborrow();
+ assert_eq!(root_node.len(), 1);
+ assert_eq!(root_node.first_leaf_edge().into_node().len(), node::CAPACITY);
+ assert_eq!(root_node.last_leaf_edge().into_node().len(), node::CAPACITY);
+
+ assert!(map.insert(pos * 2, ()).is_none());
+ map.check();
+ }
+}
+
+#[test]
+fn test_try_insert() {
+ let mut map = BTreeMap::new();
+
+ assert!(map.is_empty());
+
+ assert_eq!(map.try_insert(1, 10).unwrap(), &10);
+ assert_eq!(map.try_insert(2, 20).unwrap(), &20);
+
+ let err = map.try_insert(2, 200).unwrap_err();
+ assert_eq!(err.entry.key(), &2);
+ assert_eq!(err.entry.get(), &20);
+ assert_eq!(err.value, 200);
+}
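+
+// The error type carries both the rejected value and an occupied entry, so a
+// caller can recover either side; a minimal sketch (`try_insert` is unstable
+// in this snapshot, under feature `map_try_insert`):
+//
+//     let mut map = BTreeMap::from([(2, 20)]);
+//     match map.try_insert(2, 999) {
+//         Ok(inserted) => assert_eq!(*inserted, 999), // not reached here
+//         Err(err) => {
+//             assert_eq!(err.value, 999);       // the rejected value
+//             assert_eq!(*err.entry.get(), 20); // the value already present
+//         }
+//     }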
+
+macro_rules! create_append_test {
+ ($name:ident, $len:expr) => {
+ #[test]
+ fn $name() {
+ let mut a = BTreeMap::new();
+ for i in 0..8 {
+ a.insert(i, i);
+ }
+
+ let mut b = BTreeMap::new();
+ for i in 5..$len {
+ b.insert(i, 2 * i);
+ }
+
+ a.append(&mut b);
+
+ assert_eq!(a.len(), $len);
+ assert_eq!(b.len(), 0);
+
+ for i in 0..$len {
+ if i < 5 {
+ assert_eq!(a[&i], i);
+ } else {
+ assert_eq!(a[&i], 2 * i);
+ }
+ }
+
+ a.check();
+ assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1)));
+ assert_eq!(a.insert($len - 1, 20), None);
+ a.check();
+ }
+ };
+}
+
+// These are mostly for testing the algorithm that "fixes" the right edge after insertion.
+// Single node.
+create_append_test!(test_append_9, 9);
+// Two leaves that don't need fixing.
+create_append_test!(test_append_17, 17);
+// Two leaves where the second one ends up underfull and needs stealing at the end.
+create_append_test!(test_append_14, 14);
+// Two leaves where the second one ends up empty because the insertion finished at the root.
+create_append_test!(test_append_12, 12);
+// Three levels; insertion finished at the root.
+create_append_test!(test_append_144, 144);
+// Three levels; insertion finished at leaf while there is an empty node on the second level.
+create_append_test!(test_append_145, 145);
+// Tests for several randomly chosen sizes.
+create_append_test!(test_append_170, 170);
+create_append_test!(test_append_181, 181);
+#[cfg(not(miri))] // Miri is too slow
+create_append_test!(test_append_239, 239);
+#[cfg(not(miri))] // Miri is too slow
+create_append_test!(test_append_1700, 1700);
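+
+// The observable contract the macro checks, as a minimal sketch: `append`
+// drains `other` into `self`, and on duplicate keys the appended map's value
+// wins.
+//
+//     let mut a = BTreeMap::from([(1, "a1"), (2, "a2")]);
+//     let mut b = BTreeMap::from([(2, "b2"), (3, "b3")]);
+//     a.append(&mut b);
+//     assert!(b.is_empty());
+//     assert_eq!(a[&2], "b2"); // the appended value replaced "a2"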
+
+#[test]
+fn test_append_drop_leak() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let mut left = BTreeMap::new();
+ let mut right = BTreeMap::new();
+ left.insert(a.spawn(Panic::Never), ());
+ left.insert(b.spawn(Panic::InDrop), ()); // first duplicate key, dropped during append
+ left.insert(c.spawn(Panic::Never), ());
+ right.insert(b.spawn(Panic::Never), ());
+ right.insert(c.spawn(Panic::Never), ());
+
+ catch_unwind(move || left.append(&mut right)).unwrap_err();
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 1); // should be 2 were it not for Rust issue #47949
+ assert_eq!(c.dropped(), 2);
+}
+
+#[test]
+fn test_append_ord_chaos() {
+ let mut map1 = BTreeMap::new();
+ map1.insert(Cyclic3::A, ());
+ map1.insert(Cyclic3::B, ());
+ let mut map2 = BTreeMap::new();
+ map2.insert(Cyclic3::A, ());
+ map2.insert(Cyclic3::B, ());
+ map2.insert(Cyclic3::C, ()); // lands first, before A
+ map2.insert(Cyclic3::B, ()); // lands first, before C
+ map1.check();
+ map2.check(); // keys are not unique but still strictly ascending
+ assert_eq!(map1.len(), 2);
+ assert_eq!(map2.len(), 4);
+ map1.append(&mut map2);
+ assert_eq!(map1.len(), 5);
+ assert_eq!(map2.len(), 0);
+ map1.check();
+ map2.check();
+}
+
+fn rand_data(len: usize) -> Vec<(u32, u32)> {
+ let mut rng = DeterministicRng::new();
+ Vec::from_iter((0..len).map(|_| (rng.next(), rng.next())))
+}
+
+#[test]
+fn test_split_off_empty_right() {
+ let mut data = rand_data(173);
+
+ let mut map = BTreeMap::from_iter(data.clone());
+ let right = map.split_off(&(data.iter().max().unwrap().0 + 1));
+ map.check();
+ right.check();
+
+ data.sort();
+ assert!(map.into_iter().eq(data));
+ assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+ let mut data = rand_data(314);
+
+ let mut map = BTreeMap::from_iter(data.clone());
+ let right = map.split_off(&data.iter().min().unwrap().0);
+ map.check();
+ right.check();
+
+ data.sort();
+ assert!(map.into_iter().eq(None));
+ assert!(right.into_iter().eq(data));
+}
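+
+// The `split_off` contract the tests in this group rely on, as a minimal
+// sketch: every entry with a key >= the argument moves into the returned map.
+//
+//     let mut left = BTreeMap::from([(1, ()), (2, ()), (3, ())]);
+//     let right = left.split_off(&2);
+//     assert!(left.keys().eq(&[1]));
+//     assert!(right.keys().eq(&[2, 3]));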
+
+// In a tree with 3 levels, if all but a part of the first leaf node is split off,
+// make sure fix_top eliminates both top levels.
+#[test]
+fn test_split_off_tiny_left_height_2() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ let mut left = BTreeMap::from_iter(pairs.clone());
+ let right = left.split_off(&1);
+ left.check();
+ right.check();
+ assert_eq!(left.len(), 1);
+ assert_eq!(right.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ assert_eq!(*left.first_key_value().unwrap().0, 0);
+ assert_eq!(*right.first_key_value().unwrap().0, 1);
+}
+
+// In a tree with 3 levels, if only part of the last leaf node is split off,
+// make sure fix_top eliminates both top levels.
+#[test]
+fn test_split_off_tiny_right_height_2() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ let last = MIN_INSERTS_HEIGHT_2 - 1;
+ let mut left = BTreeMap::from_iter(pairs.clone());
+ assert_eq!(*left.last_key_value().unwrap().0, last);
+ let right = left.split_off(&last);
+ left.check();
+ right.check();
+ assert_eq!(left.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ assert_eq!(right.len(), 1);
+ assert_eq!(*left.last_key_value().unwrap().0, last - 1);
+ assert_eq!(*right.last_key_value().unwrap().0, last);
+}
+
+#[test]
+fn test_split_off_halfway() {
+ let mut rng = DeterministicRng::new();
+ for &len in &[node::CAPACITY, 25, 50, 75, 100] {
+ let mut data = Vec::from_iter((0..len).map(|_| (rng.next(), ())));
+ // Insertion in non-ascending order creates some variation in node length.
+ let mut map = BTreeMap::from_iter(data.iter().copied());
+ data.sort();
+ let small_keys = data.iter().take(len / 2).map(|kv| kv.0);
+ let large_keys = data.iter().skip(len / 2).map(|kv| kv.0);
+ let split_key = large_keys.clone().next().unwrap();
+ let right = map.split_off(&split_key);
+ map.check();
+ right.check();
+ assert!(map.keys().copied().eq(small_keys));
+ assert!(right.keys().copied().eq(large_keys));
+ }
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+ // Miri is too slow
+ let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) };
+ // special case with maximum height.
+ data.sort();
+
+ let mut map = BTreeMap::from_iter(data.clone());
+ let key = data[data.len() / 2].0;
+ let right = map.split_off(&key);
+ map.check();
+ right.check();
+
+ assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
+ assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
+}
+
+#[test]
+fn test_into_iter_drop_leak_height_0() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let d = CrashTestDummy::new(3);
+ let e = CrashTestDummy::new(4);
+ let mut map = BTreeMap::new();
+ map.insert("a", a.spawn(Panic::Never));
+ map.insert("b", b.spawn(Panic::Never));
+ map.insert("c", c.spawn(Panic::Never));
+ map.insert("d", d.spawn(Panic::InDrop));
+ map.insert("e", e.spawn(Panic::Never));
+
+ catch_unwind(move || drop(map.into_iter())).unwrap_err();
+
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 1);
+ assert_eq!(c.dropped(), 1);
+ assert_eq!(d.dropped(), 1);
+ assert_eq!(e.dropped(), 1);
+}
+
+#[test]
+fn test_into_iter_drop_leak_height_1() {
+ let size = MIN_INSERTS_HEIGHT_1;
+ for panic_point in vec![0, 1, size - 2, size - 1] {
+ let dummies = Vec::from_iter((0..size).map(|i| CrashTestDummy::new(i)));
+ let map = BTreeMap::from_iter((0..size).map(|i| {
+ let panic = if i == panic_point { Panic::InDrop } else { Panic::Never };
+ (dummies[i].spawn(Panic::Never), dummies[i].spawn(panic))
+ }));
+ catch_unwind(move || drop(map.into_iter())).unwrap_err();
+ for i in 0..size {
+ assert_eq!(dummies[i].dropped(), 2);
+ }
+ }
+}
+
+#[test]
+fn test_into_keys() {
+ let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+ let keys = Vec::from_iter(map.into_keys());
+
+ assert_eq!(keys.len(), 3);
+ assert!(keys.contains(&1));
+ assert!(keys.contains(&2));
+ assert!(keys.contains(&3));
+}
+
+#[test]
+fn test_into_values() {
+ let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
+ let values = Vec::from_iter(map.into_values());
+
+ assert_eq!(values.len(), 3);
+ assert!(values.contains(&'a'));
+ assert!(values.contains(&'b'));
+ assert!(values.contains(&'c'));
+}
+
+#[test]
+fn test_insert_remove_intertwined() {
+ let loops = if cfg!(miri) { 100 } else { 1_000_000 };
+ let mut map = BTreeMap::new();
+ let mut i = 1;
+ let offset = 165; // somewhat arbitrarily chosen to cover some code paths
+ for _ in 0..loops {
+ i = (i + offset) & 0xFF;
+ map.insert(i, i);
+ map.remove(&(0xFF - i));
+ }
+ map.check();
+}
+
+#[test]
+fn test_insert_remove_intertwined_ord_chaos() {
+ let loops = if cfg!(miri) { 100 } else { 1_000_000 };
+ let gov = Governor::new();
+ let mut map = BTreeMap::new();
+ let mut i = 1;
+ let offset = 165; // same arbitrary offset, copied from above
+ for _ in 0..loops {
+ i = (i + offset) & 0xFF;
+ map.insert(Governed(i, &gov), ());
+ map.remove(&Governed(0xFF - i, &gov));
+ gov.flip();
+ }
+ map.check_invariants();
+}
+
+#[test]
+fn from_array() {
+ let map = BTreeMap::from([(1, 2), (3, 4)]);
+ let unordered_duplicates = BTreeMap::from([(3, 4), (1, 2), (1, 2)]);
+ assert_eq!(map, unordered_duplicates);
+}
diff --git a/library/alloc/src/collections/btree/mem.rs b/library/alloc/src/collections/btree/mem.rs
new file mode 100644
index 000000000..e1363d1ae
--- /dev/null
+++ b/library/alloc/src/collections/btree/mem.rs
@@ -0,0 +1,35 @@
+use core::intrinsics;
+use core::mem;
+use core::ptr;
+
+/// Replaces the value behind the `v` unique reference with the result of
+/// calling `change` on the old value.
+///
+/// If a panic occurs in the `change` closure, the entire process will be aborted.
+#[allow(dead_code)] // keep as illustration and for future use
+#[inline]
+pub fn take_mut<T>(v: &mut T, change: impl FnOnce(T) -> T) {
+ replace(v, |value| (change(value), ()))
+}
+
+/// Replaces the value behind the `v` unique reference with the new value
+/// produced by `change`, and returns the result that `change` produced
+/// along the way.
+///
+/// If a panic occurs in the `change` closure, the entire process will be aborted.
+#[inline]
+pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
+ struct PanicGuard;
+ impl Drop for PanicGuard {
+ fn drop(&mut self) {
+ intrinsics::abort()
+ }
+ }
+ let guard = PanicGuard;
+ let value = unsafe { ptr::read(v) };
+ let (new_value, ret) = change(value);
+ unsafe {
+ ptr::write(v, new_value);
+ }
+ mem::forget(guard);
+ ret
+}
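+
+// A minimal usage sketch (hypothetical, not part of this module): `replace`
+// lets a caller rebuild a value in place while extracting a by-product, for
+// example swapping a `Vec` out from behind a `&mut` and reporting its old
+// length:
+//
+// fn reset_and_report_len(v: &mut Vec<u8>) -> usize {
+//     replace(v, |old| (Vec::new(), old.len()))
+// }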
diff --git a/library/alloc/src/collections/btree/merge_iter.rs b/library/alloc/src/collections/btree/merge_iter.rs
new file mode 100644
index 000000000..7f23d93b9
--- /dev/null
+++ b/library/alloc/src/collections/btree/merge_iter.rs
@@ -0,0 +1,98 @@
+use core::cmp::Ordering;
+use core::fmt::{self, Debug};
+use core::iter::FusedIterator;
+
+/// Core of an iterator that merges the output of two strictly ascending iterators,
+/// for instance a union or a symmetric difference.
+pub struct MergeIterInner<I: Iterator> {
+ a: I,
+ b: I,
+ peeked: Option<Peeked<I>>,
+}
+
+/// This benchmarks faster than wrapping both iterators in a `Peekable`,
+/// probably because we can afford to impose a `FusedIterator` bound.
+#[derive(Clone, Debug)]
+enum Peeked<I: Iterator> {
+ A(I::Item),
+ B(I::Item),
+}
+
+impl<I: Iterator> Clone for MergeIterInner<I>
+where
+ I: Clone,
+ I::Item: Clone,
+{
+ fn clone(&self) -> Self {
+ Self { a: self.a.clone(), b: self.b.clone(), peeked: self.peeked.clone() }
+ }
+}
+
+impl<I: Iterator> Debug for MergeIterInner<I>
+where
+ I: Debug,
+ I::Item: Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).field(&self.peeked).finish()
+ }
+}
+
+impl<I: Iterator> MergeIterInner<I> {
+ /// Creates a new core for an iterator merging a pair of sources.
+ pub fn new(a: I, b: I) -> Self {
+ MergeIterInner { a, b, peeked: None }
+ }
+
+ /// Returns the next pair of items stemming from the pair of sources
+ /// being merged. If both returned options contain a value, the two
+ /// values are equal and occur in both sources. If exactly one of the
+ /// returned options contains a value, that value doesn't occur in the
+ /// other source (or the sources are not strictly ascending). If neither
+ /// returned option contains a value, iteration has finished and
+ /// subsequent calls will return the same empty pair.
+ pub fn nexts<Cmp: Fn(&I::Item, &I::Item) -> Ordering>(
+ &mut self,
+ cmp: Cmp,
+ ) -> (Option<I::Item>, Option<I::Item>)
+ where
+ I: FusedIterator,
+ {
+ let mut a_next;
+ let mut b_next;
+ match self.peeked.take() {
+ Some(Peeked::A(next)) => {
+ a_next = Some(next);
+ b_next = self.b.next();
+ }
+ Some(Peeked::B(next)) => {
+ b_next = Some(next);
+ a_next = self.a.next();
+ }
+ None => {
+ a_next = self.a.next();
+ b_next = self.b.next();
+ }
+ }
+ if let (Some(ref a1), Some(ref b1)) = (&a_next, &b_next) {
+ match cmp(a1, b1) {
+ Ordering::Less => self.peeked = b_next.take().map(Peeked::B),
+ Ordering::Greater => self.peeked = a_next.take().map(Peeked::A),
+ Ordering::Equal => (),
+ }
+ }
+ (a_next, b_next)
+ }
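+
+ // A sketch of how a consumer such as a set-union iterator might drive
+ // `nexts` (hypothetical; the actual merging iterators live in `set.rs`):
+ //
+ // match inner.nexts(Ord::cmp) {
+ // (Some(a), Some(_b)) => Some(a), // equal items occur in both sources
+ // (Some(a), None) => Some(a), // item only in the first source
+ // (None, Some(b)) => Some(b), // item only in the second source
+ // (None, None) => None, // both sources are exhausted
+ // }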
+
+ /// Returns a pair of upper bounds for the `size_hint` of the final iterator.
+ pub fn lens(&self) -> (usize, usize)
+ where
+ I: ExactSizeIterator,
+ {
+ match self.peeked {
+ Some(Peeked::A(_)) => (1 + self.a.len(), self.b.len()),
+ Some(Peeked::B(_)) => (self.a.len(), 1 + self.b.len()),
+ _ => (self.a.len(), self.b.len()),
+ }
+ }
+}
diff --git a/library/alloc/src/collections/btree/mod.rs b/library/alloc/src/collections/btree/mod.rs
new file mode 100644
index 000000000..9d43ac5c5
--- /dev/null
+++ b/library/alloc/src/collections/btree/mod.rs
@@ -0,0 +1,26 @@
+mod append;
+mod borrow;
+mod dedup_sorted_iter;
+mod fix;
+pub mod map;
+mod mem;
+mod merge_iter;
+mod navigate;
+mod node;
+mod remove;
+mod search;
+pub mod set;
+mod set_val;
+mod split;
+
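+// Used by `BTreeSet` to recover the owned key stored in the tree (for
+// operations like `get`, `take` and `replace`), rather than merely
+// answering membership queries.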
+#[doc(hidden)]
+trait Recover<Q: ?Sized> {
+ type Key;
+
+ fn get(&self, key: &Q) -> Option<&Self::Key>;
+ fn take(&mut self, key: &Q) -> Option<Self::Key>;
+ fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
+}
+
+#[cfg(test)]
+mod testing;
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
new file mode 100644
index 000000000..1e33c1e64
--- /dev/null
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -0,0 +1,719 @@
+use core::borrow::Borrow;
+use core::hint;
+use core::ops::RangeBounds;
+use core::ptr;
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+
+use crate::alloc::Allocator;
+
+// `front` and `back` are always both `None` or both `Some`.
+pub struct LeafRange<BorrowType, K, V> {
+ front: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
+ back: Option<Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>>,
+}
+
+impl<'a, K: 'a, V: 'a> Clone for LeafRange<marker::Immut<'a>, K, V> {
+ fn clone(&self) -> Self {
+ LeafRange { front: self.front.clone(), back: self.back.clone() }
+ }
+}
+
+impl<BorrowType, K, V> LeafRange<BorrowType, K, V> {
+ pub fn none() -> Self {
+ LeafRange { front: None, back: None }
+ }
+
+ fn is_empty(&self) -> bool {
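+ // The range is exhausted once both endpoint handles have met on the
+ // same edge (or were `None` to begin with).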
+ self.front == self.back
+ }
+
+ /// Temporarily takes out another, immutable equivalent of the same range.
+ pub fn reborrow(&self) -> LeafRange<marker::Immut<'_>, K, V> {
+ LeafRange {
+ front: self.front.as_ref().map(|f| f.reborrow()),
+ back: self.back.as_ref().map(|b| b.reborrow()),
+ }
+ }
+}
+
+impl<'a, K, V> LeafRange<marker::Immut<'a>, K, V> {
+ #[inline]
+ pub fn next_checked(&mut self) -> Option<(&'a K, &'a V)> {
+ self.perform_next_checked(|kv| kv.into_kv())
+ }
+
+ #[inline]
+ pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a V)> {
+ self.perform_next_back_checked(|kv| kv.into_kv())
+ }
+}
+
+impl<'a, K, V> LeafRange<marker::ValMut<'a>, K, V> {
+ #[inline]
+ pub fn next_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
+ self.perform_next_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
+ }
+
+ #[inline]
+ pub fn next_back_checked(&mut self) -> Option<(&'a K, &'a mut V)> {
+ self.perform_next_back_checked(|kv| unsafe { ptr::read(kv) }.into_kv_valmut())
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V> LeafRange<BorrowType, K, V> {
+ /// If possible, extract some result from the following KV and move to the edge beyond it.
+ fn perform_next_checked<F, R>(&mut self, f: F) -> Option<R>
+ where
+ F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
+ {
+ if self.is_empty() {
+ None
+ } else {
+ super::mem::replace(self.front.as_mut().unwrap(), |front| {
+ let kv = front.next_kv().ok().unwrap();
+ let result = f(&kv);
+ (kv.next_leaf_edge(), Some(result))
+ })
+ }
+ }
+
+ /// If possible, extract some result from the preceding KV and move to the edge beyond it.
+ fn perform_next_back_checked<F, R>(&mut self, f: F) -> Option<R>
+ where
+ F: Fn(&Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>) -> R,
+ {
+ if self.is_empty() {
+ None
+ } else {
+ super::mem::replace(self.back.as_mut().unwrap(), |back| {
+ let kv = back.next_back_kv().ok().unwrap();
+ let result = f(&kv);
+ (kv.next_back_leaf_edge(), Some(result))
+ })
+ }
+ }
+}
+
+enum LazyLeafHandle<BorrowType, K, V> {
+ Root(NodeRef<BorrowType, K, V, marker::LeafOrInternal>), // not yet descended
+ Edge(Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>),
+}
+
+impl<'a, K: 'a, V: 'a> Clone for LazyLeafHandle<marker::Immut<'a>, K, V> {
+ fn clone(&self) -> Self {
+ match self {
+ LazyLeafHandle::Root(root) => LazyLeafHandle::Root(*root),
+ LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(*edge),
+ }
+ }
+}
+
+impl<BorrowType, K, V> LazyLeafHandle<BorrowType, K, V> {
+ fn reborrow(&self) -> LazyLeafHandle<marker::Immut<'_>, K, V> {
+ match self {
+ LazyLeafHandle::Root(root) => LazyLeafHandle::Root(root.reborrow()),
+ LazyLeafHandle::Edge(edge) => LazyLeafHandle::Edge(edge.reborrow()),
+ }
+ }
+}
+
+// `front` and `back` are always both `None` or both `Some`.
+pub struct LazyLeafRange<BorrowType, K, V> {
+ front: Option<LazyLeafHandle<BorrowType, K, V>>,
+ back: Option<LazyLeafHandle<BorrowType, K, V>>,
+}
+
+impl<'a, K: 'a, V: 'a> Clone for LazyLeafRange<marker::Immut<'a>, K, V> {
+ fn clone(&self) -> Self {
+ LazyLeafRange { front: self.front.clone(), back: self.back.clone() }
+ }
+}
+
+impl<BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
+ pub fn none() -> Self {
+ LazyLeafRange { front: None, back: None }
+ }
+
+ /// Temporarily takes out another, immutable equivalent of the same range.
+ pub fn reborrow(&self) -> LazyLeafRange<marker::Immut<'_>, K, V> {
+ LazyLeafRange {
+ front: self.front.as_ref().map(|f| f.reborrow()),
+ back: self.back.as_ref().map(|b| b.reborrow()),
+ }
+ }
+}
+
+impl<'a, K, V> LazyLeafRange<marker::Immut<'a>, K, V> {
+ #[inline]
+ pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+ unsafe { self.init_front().unwrap().next_unchecked() }
+ }
+
+ #[inline]
+ pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+ unsafe { self.init_back().unwrap().next_back_unchecked() }
+ }
+}
+
+impl<'a, K, V> LazyLeafRange<marker::ValMut<'a>, K, V> {
+ #[inline]
+ pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ unsafe { self.init_front().unwrap().next_unchecked() }
+ }
+
+ #[inline]
+ pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ unsafe { self.init_back().unwrap().next_back_unchecked() }
+ }
+}
+
+impl<K, V> LazyLeafRange<marker::Dying, K, V> {
+ fn take_front(
+ &mut self,
+ ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>> {
+ match self.front.take()? {
+ LazyLeafHandle::Root(root) => Some(root.first_leaf_edge()),
+ LazyLeafHandle::Edge(edge) => Some(edge),
+ }
+ }
+
+ #[inline]
+ pub unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
+ &mut self,
+ alloc: A,
+ ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
+ debug_assert!(self.front.is_some());
+ let front = self.init_front().unwrap();
+ unsafe { front.deallocating_next_unchecked(alloc) }
+ }
+
+ #[inline]
+ pub unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
+ &mut self,
+ alloc: A,
+ ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
+ debug_assert!(self.back.is_some());
+ let back = self.init_back().unwrap();
+ unsafe { back.deallocating_next_back_unchecked(alloc) }
+ }
+
+ #[inline]
+ pub fn deallocating_end<A: Allocator + Clone>(&mut self, alloc: A) {
+ if let Some(front) = self.take_front() {
+ front.deallocating_end(alloc)
+ }
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
+ fn init_front(
+ &mut self,
+ ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
+ if let Some(LazyLeafHandle::Root(root)) = &self.front {
+ self.front = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.first_leaf_edge()));
+ }
+ match &mut self.front {
+ None => None,
+ Some(LazyLeafHandle::Edge(edge)) => Some(edge),
+ // SAFETY: the code above would have replaced it.
+ Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
+ }
+ }
+
+ fn init_back(
+ &mut self,
+ ) -> Option<&mut Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>> {
+ if let Some(LazyLeafHandle::Root(root)) = &self.back {
+ self.back = Some(LazyLeafHandle::Edge(unsafe { ptr::read(root) }.last_leaf_edge()));
+ }
+ match &mut self.back {
+ None => None,
+ Some(LazyLeafHandle::Edge(edge)) => Some(edge),
+ // SAFETY: the code above would have replaced it.
+ Some(LazyLeafHandle::Root(_)) => unsafe { hint::unreachable_unchecked() },
+ }
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Finds the distinct leaf edges delimiting a specified range in a tree.
+ ///
+ /// If such distinct edges exist, returns them in ascending order, meaning
+ /// that a non-zero number of calls to `next_unchecked` on the `front` of
+ /// the result and/or calls to `next_back_unchecked` on the `back` of the
+ /// result will eventually reach the same edge.
+ ///
+ /// If there are no such edges, i.e., if the tree contains no key within
+ /// the range, returns an empty `front` and `back`.
+ ///
+ /// # Safety
+ /// Unless `BorrowType` is `Immut`, do not use the handles to visit the same
+ /// KV twice.
+ unsafe fn find_leaf_edges_spanning_range<Q: ?Sized, R>(
+ self,
+ range: R,
+ ) -> LeafRange<BorrowType, K, V>
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ R: RangeBounds<Q>,
+ {
+ match self.search_tree_for_bifurcation(&range) {
+ Err(_) => LeafRange::none(),
+ Ok((
+ node,
+ lower_edge_idx,
+ upper_edge_idx,
+ mut lower_child_bound,
+ mut upper_child_bound,
+ )) => {
+ let mut lower_edge = unsafe { Handle::new_edge(ptr::read(&node), lower_edge_idx) };
+ let mut upper_edge = unsafe { Handle::new_edge(node, upper_edge_idx) };
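+ // Descend both bounds in lockstep; the tree has uniform depth, so
+ // both sides reach leaf level in the same iteration.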
+ loop {
+ match (lower_edge.force(), upper_edge.force()) {
+ (Leaf(f), Leaf(b)) => return LeafRange { front: Some(f), back: Some(b) },
+ (Internal(f), Internal(b)) => {
+ (lower_edge, lower_child_bound) =
+ f.descend().find_lower_bound_edge(lower_child_bound);
+ (upper_edge, upper_child_bound) =
+ b.descend().find_upper_bound_edge(upper_child_bound);
+ }
+ _ => unreachable!("BTreeMap has different depths"),
+ }
+ }
+ }
+ }
+ }
+}
+
+fn full_range<BorrowType: marker::BorrowType, K, V>(
+ root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+) -> LazyLeafRange<BorrowType, K, V> {
+ LazyLeafRange {
+ front: Some(LazyLeafHandle::Root(root1)),
+ back: Some(LazyLeafHandle::Root(root2)),
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+ /// Finds the pair of leaf edges delimiting a specific range in a tree.
+ ///
+ /// The result is meaningful only if the tree is ordered by key, like the tree
+ /// in a `BTreeMap` is.
+ pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::Immut<'a>, K, V>
+ where
+ Q: ?Sized + Ord,
+ K: Borrow<Q>,
+ R: RangeBounds<Q>,
+ {
+ // SAFETY: our borrow type is immutable.
+ unsafe { self.find_leaf_edges_spanning_range(range) }
+ }
+
+ /// Finds the pair of leaf edges delimiting an entire tree.
+ pub fn full_range(self) -> LazyLeafRange<marker::Immut<'a>, K, V> {
+ full_range(self, self)
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::ValMut<'a>, K, V, marker::LeafOrInternal> {
+ /// Splits a unique reference into a pair of leaf edges delimiting a specified range.
+ /// The results are non-unique references allowing (some) mutation, which must
+ /// be used carefully.
+ ///
+ /// The result is meaningful only if the tree is ordered by key, like the tree
+ /// in a `BTreeMap` is.
+ ///
+ /// # Safety
+ /// Do not use the duplicate handles to visit the same KV twice.
+ pub fn range_search<Q, R>(self, range: R) -> LeafRange<marker::ValMut<'a>, K, V>
+ where
+ Q: ?Sized + Ord,
+ K: Borrow<Q>,
+ R: RangeBounds<Q>,
+ {
+ unsafe { self.find_leaf_edges_spanning_range(range) }
+ }
+
+ /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
+ /// The results are non-unique references allowing mutation (of values only), so must be used
+ /// with care.
+ pub fn full_range(self) -> LazyLeafRange<marker::ValMut<'a>, K, V> {
+ // We duplicate the root NodeRef here -- we will never visit the same KV
+ // twice, and never end up with overlapping value references.
+ let self2 = unsafe { ptr::read(&self) };
+ full_range(self, self2)
+ }
+}
+
+impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
+ /// Splits a unique reference into a pair of leaf edges delimiting the full range of the tree.
+ /// The results are non-unique references allowing massively destructive mutation, so must be
+ /// used with the utmost care.
+ pub fn full_range(self) -> LazyLeafRange<marker::Dying, K, V> {
+ // We duplicate the root NodeRef here -- we will never access it in a way
+ // that overlaps references obtained from the root.
+ let self2 = unsafe { ptr::read(&self) };
+ full_range(self, self2)
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V>
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
+{
+ /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
+ /// on the right side, which is either in the same leaf node or in an ancestor node.
+ /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
+ pub fn next_kv(
+ self,
+ ) -> Result<
+ Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
+ NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ > {
+ let mut edge = self.forget_node_type();
+ loop {
+ edge = match edge.right_kv() {
+ Ok(kv) => return Ok(kv),
+ Err(last_edge) => match last_edge.into_node().ascend() {
+ Ok(parent_edge) => parent_edge.forget_node_type(),
+ Err(root) => return Err(root),
+ },
+ }
+ }
+ }
+
+ /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
+ /// on the left side, which is either in the same leaf node or in an ancestor node.
+ /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
+ fn next_back_kv(
+ self,
+ ) -> Result<
+ Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
+ NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ > {
+ let mut edge = self.forget_node_type();
+ loop {
+ edge = match edge.left_kv() {
+ Ok(kv) => return Ok(kv),
+ Err(last_edge) => match last_edge.into_node().ascend() {
+ Ok(parent_edge) => parent_edge.forget_node_type(),
+ Err(root) => return Err(root),
+ },
+ }
+ }
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V>
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
+{
+ /// Given an internal edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
+ /// on the right side, which is either in the same internal node or in an ancestor node.
+ /// If the internal edge is the last one in the tree, returns [`Result::Err`] with the root node.
+ fn next_kv(
+ self,
+ ) -> Result<
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>,
+ NodeRef<BorrowType, K, V, marker::Internal>,
+ > {
+ let mut edge = self;
+ loop {
+ edge = match edge.right_kv() {
+ Ok(internal_kv) => return Ok(internal_kv),
+ Err(last_edge) => match last_edge.into_node().ascend() {
+ Ok(parent_edge) => parent_edge,
+ Err(root) => return Err(root),
+ },
+ }
+ }
+ }
+}
+
+impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
+ /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+ /// on the right side, and the key-value pair in between, if they exist.
+ ///
+ /// If the given edge is the last one in a leaf, this method deallocates
+ /// the leaf, as well as any ancestor nodes whose last edge was reached.
+ /// This implies that if no more key-value pairs follow, the entire tree
+ /// will have been deallocated and there is nothing left to return.
+ ///
+ /// # Safety
+ /// - The given edge must not have been previously returned by counterpart
+ /// `deallocating_next_back`.
+ /// - The returned KV handle is only valid to access the key and value,
+ /// and only valid until the next call to a `deallocating_` method.
+ unsafe fn deallocating_next<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
+ {
+ let mut edge = self.forget_node_type();
+ loop {
+ edge = match edge.right_kv() {
+ Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)),
+ Err(last_edge) => {
+ match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
+ Some(parent_edge) => parent_edge.forget_node_type(),
+ None => return None,
+ }
+ }
+ }
+ }
+ }
+
+ /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+ /// on the left side, and the key-value pair in between, if they exist.
+ ///
+ /// If the given edge is the first one in a leaf, this method deallocates
+ /// the leaf, as well as any ancestor nodes whose first edge was reached.
+ /// This implies that if no more key-value pairs follow, the entire tree
+ /// will have been deallocated and there is nothing left to return.
+ ///
+ /// # Safety
+ /// - The given edge must not have been previously returned by counterpart
+ /// `deallocating_next`.
+ /// - The returned KV handle is only valid to access the key and value,
+ /// and only valid until the next call to a `deallocating_` method.
+ unsafe fn deallocating_next_back<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
+ {
+ let mut edge = self.forget_node_type();
+ loop {
+ edge = match edge.left_kv() {
+ Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)),
+ Err(last_edge) => {
+ match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
+ Some(parent_edge) => parent_edge.forget_node_type(),
+ None => return None,
+ }
+ }
+ }
+ }
+ }
+
+ /// Deallocates a pile of nodes from the leaf up to the root.
+ /// This is the only way to deallocate the remainder of a tree after
+ /// `deallocating_next` and `deallocating_next_back` have been nibbling at
+ /// both sides of the tree, and have hit the same edge. As it is intended
+ /// only to be called when all keys and values have been returned,
+ /// no cleanup is done on any of the keys or values.
+ fn deallocating_end<A: Allocator + Clone>(self, alloc: A) {
+ let mut edge = self.forget_node_type();
+ while let Some(parent_edge) =
+ unsafe { edge.into_node().deallocate_and_ascend(alloc.clone()) }
+ {
+ edge = parent_edge.forget_node_type();
+ }
+ }
+}
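+
+// Taken together, the methods above form the drain protocol for a dying tree:
+// a caller alternates `deallocating_next` and `deallocating_next_back` from
+// both ends until the handles meet, then calls `deallocating_end` once to
+// free the remaining spine of nodes; `LazyLeafRange` drives this sequence.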
+
+impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Moves the leaf edge handle to the next leaf edge and returns references to the
+ /// key and value in between.
+ ///
+ /// # Safety
+ /// There must be another KV in the direction travelled.
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+ super::mem::replace(self, |leaf_edge| {
+ let kv = leaf_edge.next_kv().ok().unwrap();
+ (kv.next_leaf_edge(), kv.into_kv())
+ })
+ }
+
+ /// Moves the leaf edge handle to the previous leaf edge and returns references to the
+ /// key and value in between.
+ ///
+ /// # Safety
+ /// There must be another KV in the direction travelled.
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+ super::mem::replace(self, |leaf_edge| {
+ let kv = leaf_edge.next_back_kv().ok().unwrap();
+ (kv.next_back_leaf_edge(), kv.into_kv())
+ })
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::ValMut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Moves the leaf edge handle to the next leaf edge and returns references to the
+ /// key and value in between.
+ ///
+ /// # Safety
+ /// There must be another KV in the direction travelled.
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ let kv = super::mem::replace(self, |leaf_edge| {
+ let kv = leaf_edge.next_kv().ok().unwrap();
+ (unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)
+ });
+ // Doing this last is faster, according to benchmarks.
+ kv.into_kv_valmut()
+ }
+
+ /// Moves the leaf edge handle to the previous leaf edge and returns references to the
+ /// key and value in between.
+ ///
+ /// # Safety
+ /// There must be another KV in the direction travelled.
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ let kv = super::mem::replace(self, |leaf_edge| {
+ let kv = leaf_edge.next_back_kv().ok().unwrap();
+ (unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)
+ });
+ // Doing this last is faster, according to benchmarks.
+ kv.into_kv_valmut()
+ }
+}
+
+impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
+ /// Moves the leaf edge handle to the next leaf edge and returns the key and value
+ /// in between, deallocating any node left behind while leaving the corresponding
+ /// edge in its parent node dangling.
+ ///
+ /// # Safety
+ /// - There must be another KV in the direction travelled.
+ /// - That KV was not previously returned by counterpart
+ /// `deallocating_next_back_unchecked` on any copy of the handles
+ /// being used to traverse the tree.
+ ///
+ /// The only safe way to proceed with the updated handle is to compare it, drop it,
+ /// or call this method or counterpart `deallocating_next_back_unchecked` again.
+ unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
+ &mut self,
+ alloc: A,
+ ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
+ super::mem::replace(self, |leaf_edge| unsafe {
+ leaf_edge.deallocating_next(alloc).unwrap()
+ })
+ }
+
+ /// Moves the leaf edge handle to the previous leaf edge and returns the key and value
+ /// in between, deallocating any node left behind while leaving the corresponding
+ /// edge in its parent node dangling.
+ ///
+ /// # Safety
+ /// - There must be another KV in the direction travelled.
+ /// - That leaf edge was not previously returned by counterpart
+ /// `deallocating_next_unchecked` on any copy of the handles
+ /// being used to traverse the tree.
+ ///
+ /// The only safe way to proceed with the updated handle is to compare it, drop it,
+ /// or call this method or counterpart `deallocating_next_unchecked` again.
+ unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
+ &mut self,
+ alloc: A,
+ ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
+ super::mem::replace(self, |leaf_edge| unsafe {
+ leaf_edge.deallocating_next_back(alloc).unwrap()
+ })
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
+ /// you need first when navigating forward (or last when navigating backward).
+ #[inline]
+ pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ let mut node = self;
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.first_edge(),
+ Internal(internal) => node = internal.first_edge().descend(),
+ }
+ }
+ }
+
+ /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
+ /// you need last when navigating forward (or first when navigating backward).
+ #[inline]
+ pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ let mut node = self;
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.last_edge(),
+ Internal(internal) => node = internal.last_edge().descend(),
+ }
+ }
+ }
+}
+
+pub enum Position<BorrowType, K, V> {
+ Leaf(NodeRef<BorrowType, K, V, marker::Leaf>),
+ Internal(NodeRef<BorrowType, K, V, marker::Internal>),
+ InternalKV(Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV>),
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+ /// Visits leaf nodes and internal KVs in order of ascending keys, and also
+ /// visits internal nodes as a whole in a depth first order, meaning that
+ /// internal nodes precede their individual KVs and their child nodes.
+ pub fn visit_nodes_in_order<F>(self, mut visit: F)
+ where
+ F: FnMut(Position<marker::Immut<'a>, K, V>),
+ {
+ match self.force() {
+ Leaf(leaf) => visit(Position::Leaf(leaf)),
+ Internal(internal) => {
+ visit(Position::Internal(internal));
+ let mut edge = internal.first_edge();
+ loop {
+ edge = match edge.descend().force() {
+ Leaf(leaf) => {
+ visit(Position::Leaf(leaf));
+ match edge.next_kv() {
+ Ok(kv) => {
+ visit(Position::InternalKV(kv));
+ kv.right_edge()
+ }
+ Err(_) => return,
+ }
+ }
+ Internal(internal) => {
+ visit(Position::Internal(internal));
+ internal.first_edge()
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /// Calculates the number of elements in a (sub)tree.
+ pub fn calc_length(self) -> usize {
+ let mut result = 0;
+ self.visit_nodes_in_order(|pos| match pos {
+ Position::Leaf(node) => result += node.len(),
+ Position::Internal(node) => result += node.len(),
+ Position::InternalKV(_) => (),
+ });
+ result
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V>
+ Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>
+{
+ /// Returns the leaf edge closest to a KV for forward navigation.
+ pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ match self.force() {
+ Leaf(leaf_kv) => leaf_kv.right_edge(),
+ Internal(internal_kv) => {
+ let next_internal_edge = internal_kv.right_edge();
+ next_internal_edge.descend().first_leaf_edge()
+ }
+ }
+ }
+
+ /// Returns the leaf edge closest to a KV for backward navigation.
+ fn next_back_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ match self.force() {
+ Leaf(leaf_kv) => leaf_kv.left_edge(),
+ Internal(internal_kv) => {
+ let next_internal_edge = internal_kv.left_edge();
+ next_internal_edge.descend().last_leaf_edge()
+ }
+ }
+ }
+}
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
new file mode 100644
index 000000000..d831161bc
--- /dev/null
+++ b/library/alloc/src/collections/btree/node.rs
@@ -0,0 +1,1753 @@
+// This is an attempt at an implementation following the ideal
+//
+// ```
+// struct BTreeMap<K, V> {
+// height: usize,
+// root: Option<Box<Node<K, V, height>>>
+// }
+//
+// struct Node<K, V, height: usize> {
+// keys: [K; 2 * B - 1],
+// vals: [V; 2 * B - 1],
+// edges: [if height > 0 { Box<Node<K, V, height - 1>> } else { () }; 2 * B],
+// parent: Option<(NonNull<Node<K, V, height + 1>>, u16)>,
+// len: u16,
+// }
+// ```
+//
+// Since Rust doesn't actually have dependent types and polymorphic recursion,
+// we make do with lots of unsafety.
+
+// A major goal of this module is to avoid complexity by treating the tree as a generic (if
+// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
+// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
+// even what underfull means. However, we do rely on a few invariants:
+//
+// - Trees must have uniform depth/height. This means that every path down to a leaf from a
+// given node has exactly the same length.
+// - A node of length `n` has `n` keys, `n` values, and `n + 1` edges.
+// This implies that even an empty node has at least one edge.
+// For a leaf node, "having an edge" only means we can identify a position in the node,
+// since leaf edges are empty and need no data representation. In an internal node,
+// an edge both identifies a position and contains a pointer to a child node.
+
+use core::marker::PhantomData;
+use core::mem::{self, MaybeUninit};
+use core::ptr::{self, NonNull};
+use core::slice::SliceIndex;
+
+use crate::alloc::{Allocator, Layout};
+use crate::boxed::Box;
+
+const B: usize = 6;
+pub const CAPACITY: usize = 2 * B - 1;
+pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
+const KV_IDX_CENTER: usize = B - 1;
+const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
+const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
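+
+// With B = 6, a node holds at most CAPACITY = 11 key-value pairs, an internal
+// node has at most 2 * B = 12 edges, and each side of a split ends up with at
+// least MIN_LEN_AFTER_SPLIT = 5 pairs.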
+
+/// The underlying representation of leaf nodes and part of the representation of internal nodes.
+struct LeafNode<K, V> {
+ /// We want to be covariant in `K` and `V`.
+ parent: Option<NonNull<InternalNode<K, V>>>,
+
+ /// This node's index into the parent node's `edges` array.
+ /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+ /// This is only guaranteed to be initialized when `parent` is non-null.
+ parent_idx: MaybeUninit<u16>,
+
+ /// The number of keys and values this node stores.
+ len: u16,
+
+ /// The arrays storing the actual data of the node. Only the first `len` elements of each
+ /// array are initialized and valid.
+ keys: [MaybeUninit<K>; CAPACITY],
+ vals: [MaybeUninit<V>; CAPACITY],
+}
+
+impl<K, V> LeafNode<K, V> {
+ /// Initializes a new `LeafNode` in-place.
+ unsafe fn init(this: *mut Self) {
+ // As a general policy, we leave fields uninitialized if they can be, as this should
+ // be both slightly faster and easier to track in Valgrind.
+ unsafe {
+ // parent_idx, keys, and vals are all MaybeUninit
+ ptr::addr_of_mut!((*this).parent).write(None);
+ ptr::addr_of_mut!((*this).len).write(0);
+ }
+ }
+
+ /// Creates a new boxed `LeafNode`.
+ fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
+ unsafe {
+ let mut leaf = Box::new_uninit_in(alloc);
+ LeafNode::init(leaf.as_mut_ptr());
+ leaf.assume_init()
+ }
+ }
+}
+
+/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
+/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
+/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
+/// node, allowing code to act on leaf and internal nodes generically without having to even check
+/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
+#[repr(C)]
+// gdb_providers.py uses this type name for introspection.
+struct InternalNode<K, V> {
+ data: LeafNode<K, V>,
+
+ /// The pointers to the children of this node. `len + 1` of these are considered
+ /// initialized and valid, except that near the end, while the tree is held
+ /// through borrow type `Dying`, some of these pointers are dangling.
+ edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
+}
+
+impl<K, V> InternalNode<K, V> {
+ /// Creates a new boxed `InternalNode`.
+ ///
+ /// # Safety
+ /// An invariant of internal nodes is that they have at least one
+ /// initialized and valid edge. This function does not set up
+ /// such an edge.
+ unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
+ unsafe {
+ let mut node = Box::<Self, _>::new_uninit_in(alloc);
+ // We only need to initialize the data; the edges are MaybeUninit.
+ LeafNode::init(ptr::addr_of_mut!((*node.as_mut_ptr()).data));
+ node.assume_init()
+ }
+ }
+}
+
+/// A managed, non-null pointer to a node. This is either an owned pointer to
+/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
+///
+/// However, `BoxedNode` contains no information as to which of the two types
+/// of nodes it actually contains, and, partially due to this lack of information,
+/// is not a separate type and has no destructor.
+type BoxedNode<K, V> = NonNull<LeafNode<K, V>>;
+
+// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
+// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
+// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
+// However, whenever a public type wraps `NodeRef`, make sure that it has the
+// correct variance.
+/// A reference to a node.
+///
+/// This type has a number of parameters that control how it acts:
+/// - `BorrowType`: A dummy type that describes the kind of borrow and carries a lifetime.
+/// - When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`.
+/// - When this is `ValMut<'a>`, the `NodeRef` acts roughly like `&'a Node`
+/// with respect to keys and tree structure, but also allows many
+/// mutable references to values throughout the tree to coexist.
+/// - When this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+/// although insert methods allow a mutable pointer to a value to coexist.
+/// - When this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`,
+/// but does not have a destructor, and must be cleaned up manually.
+/// - When this is `Dying`, the `NodeRef` still acts roughly like `Box<Node>`,
+/// but has methods to destroy the tree bit by bit, and ordinary methods,
+/// while not marked as unsafe to call, can invoke UB if called incorrectly.
+/// Since any `NodeRef` allows navigating through the tree, `BorrowType`
+/// effectively applies to the entire tree, not just to the node itself.
+/// - `K` and `V`: These are the types of keys and values stored in the nodes.
+/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
+/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
+/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
+/// `NodeRef` could be pointing to either type of node.
+/// `Type` is named `NodeType` when used outside `NodeRef`.
+///
+/// Both `BorrowType` and `NodeType` restrict what methods we implement, to
+/// exploit static type safety. There are limitations in the way we can apply
+/// such restrictions:
+/// - For each type parameter, we can only define a method either generically
+/// or for one particular type. For example, we cannot define a method like
+/// `into_kv` generically for all `BorrowType`, or once for all types that
+/// carry a lifetime, because we want it to return `&'a` references.
+/// Therefore, we define it only for the least powerful type `Immut<'a>`.
+/// - We cannot get implicit coercion from say `Mut<'a>` to `Immut<'a>`.
+/// Therefore, we have to explicitly call `reborrow` on a more powerful
+/// `NodeRef` in order to reach a method like `into_kv`.
+///
+/// All methods on `NodeRef` that return some kind of reference, either:
+/// - Take `self` by value, and return the lifetime carried by `BorrowType`.
+/// Sometimes, to invoke such a method, we need to call `reborrow_mut`.
+/// - Take `self` by reference, and (implicitly) return that reference's
+/// lifetime, instead of the lifetime carried by `BorrowType`. That way,
+/// the borrow checker guarantees that the `NodeRef` remains borrowed as long
+/// as the returned reference is used.
+/// The methods supporting insert bend this rule by returning a raw pointer,
+/// i.e., a reference without any lifetime.
+pub struct NodeRef<BorrowType, K, V, Type> {
+ /// The number of levels separating the node from the level of the leaves: a
+ /// constant of the node that cannot be entirely described by `Type`, and that
+ /// the node itself does not store. We only need to store the height of the root
+ /// node, and derive every other node's height from it.
+ /// Must be zero if `Type` is `Leaf` and non-zero if `Type` is `Internal`.
+ height: usize,
+ /// The pointer to the leaf or internal node. The definition of `InternalNode`
+ /// ensures that the pointer is valid either way.
+ node: NonNull<LeafNode<K, V>>,
+ _marker: PhantomData<(BorrowType, Type)>,
+}
+
+/// The root node of an owned tree.
+///
+/// Note that this does not have a destructor, and must be cleaned up manually.
+pub type Root<K, V> = NodeRef<marker::Owned, K, V, marker::LeafOrInternal>;
+
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
+impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
+
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::ValMut<'a>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
+ pub fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
+ Self::from_new_leaf(LeafNode::new(alloc))
+ }
+
+ fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
+ NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
+ fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
+ let mut new_node = unsafe { InternalNode::new(alloc) };
+ new_node.edges[0].write(child.node);
+ unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
+ }
+
+ /// # Safety
+ /// `height` must not be zero.
+ unsafe fn from_new_internal<A: Allocator + Clone>(
+ internal: Box<InternalNode<K, V>, A>,
+ height: usize,
+ ) -> Self {
+ debug_assert!(height > 0);
+ let node = NonNull::from(Box::leak(internal)).cast();
+ let mut this = NodeRef { height, node, _marker: PhantomData };
+ this.borrow_mut().correct_all_childrens_parent_links();
+ this
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ /// Unpack a node reference that was packed as `NodeRef::parent`.
+ fn from_internal(node: NonNull<InternalNode<K, V>>, height: usize) -> Self {
+ debug_assert!(height > 0);
+ NodeRef { height, node: node.cast(), _marker: PhantomData }
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ /// Exposes the data of an internal node.
+ ///
+ /// Returns a raw ptr to avoid invalidating other references to this node.
+ fn as_internal_ptr(this: &Self) -> *mut InternalNode<K, V> {
+ // SAFETY: the static node type is `Internal`.
+ this.node.as_ptr() as *mut InternalNode<K, V>
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Borrows exclusive access to the data of an internal node.
+ fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
+ let ptr = Self::as_internal_ptr(self);
+ unsafe { &mut *ptr }
+ }
+}
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Finds the length of the node. This is the number of keys or values.
+ /// The number of edges is `len() + 1`.
+ /// Note that, despite being safe, calling this function can have the side effect
+ /// of invalidating mutable references that unsafe code has created.
+ pub fn len(&self) -> usize {
+ // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut,
+ // there might be outstanding mutable references to values that we must not invalidate.
+ unsafe { usize::from((*Self::as_leaf_ptr(self)).len) }
+ }
+
+ /// Returns the number of levels that the node and leaves are apart. Zero
+ /// height means the node is a leaf itself. If you picture trees with the
+ /// root on top, the number says at which elevation the node appears.
+ /// If you picture trees with leaves on top, the number says how high
+ /// the tree extends above the node.
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
+ /// Temporarily takes out another, immutable reference to the same node.
+ pub fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+
+ /// Exposes the leaf portion of any leaf or internal node.
+ ///
+ /// Returns a raw ptr to avoid invalidating other references to this node.
+ fn as_leaf_ptr(this: &Self) -> *mut LeafNode<K, V> {
+ // The node must be valid for at least the LeafNode portion.
+ // This is not a reference in the NodeRef type because we don't know if
+ // it should be unique or shared.
+ this.node.as_ptr()
+ }
+}
+
+impl<BorrowType: marker::BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Finds the parent of the current node. Returns `Ok(handle)` if the current
+ /// node actually has a parent, where `handle` points to the edge of the parent
+ /// that points to the current node. Returns `Err(self)` if the current node has
+ /// no parent, giving back the original `NodeRef`.
+ ///
+ /// The method name assumes you picture trees with the root node on top.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn ascend(
+ self,
+ ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
+ assert!(BorrowType::PERMITS_TRAVERSAL);
+ // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut,
+ // there might be outstanding mutable references to values that we must not invalidate.
+ let leaf_ptr: *const _ = Self::as_leaf_ptr(&self);
+ unsafe { (*leaf_ptr).parent }
+ .as_ref()
+ .map(|parent| Handle {
+ node: NodeRef::from_internal(*parent, self.height + 1),
+ idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) },
+ _marker: PhantomData,
+ })
+ .ok_or(self)
+ }
+
+ pub fn first_edge(self) -> Handle<Self, marker::Edge> {
+ unsafe { Handle::new_edge(self, 0) }
+ }
+
+ pub fn last_edge(self) -> Handle<Self, marker::Edge> {
+ let len = self.len();
+ unsafe { Handle::new_edge(self, len) }
+ }
+
+ /// Note that `self` must be nonempty.
+ pub fn first_kv(self) -> Handle<Self, marker::KV> {
+ let len = self.len();
+ assert!(len > 0);
+ unsafe { Handle::new_kv(self, 0) }
+ }
+
+ /// Note that `self` must be nonempty.
+ pub fn last_kv(self) -> Handle<Self, marker::KV> {
+ let len = self.len();
+ assert!(len > 0);
+ unsafe { Handle::new_kv(self, len - 1) }
+ }
+}
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Could be a public implementation of PartialEq, but only used in this module.
+ fn eq(&self, other: &Self) -> bool {
+ let Self { node, height, _marker } = self;
+ if node.eq(&other.node) {
+ debug_assert_eq!(*height, other.height);
+ true
+ } else {
+ false
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
+ /// Exposes the leaf portion of any leaf or internal node in an immutable tree.
+ fn into_leaf(self) -> &'a LeafNode<K, V> {
+ let ptr = Self::as_leaf_ptr(&self);
+ // SAFETY: there can be no mutable references into this tree borrowed as `Immut`.
+ unsafe { &*ptr }
+ }
+
+ /// Borrows a view into the keys stored in the node.
+ pub fn keys(&self) -> &[K] {
+ let leaf = self.into_leaf();
+ unsafe {
+ MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len)))
+ }
+ }
+}
+
+impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
+ pub unsafe fn deallocate_and_ascend<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
+ let height = self.height;
+ let node = self.node;
+ let ret = self.ascend().ok();
+ unsafe {
+ alloc.deallocate(
+ node.cast(),
+ if height > 0 {
+ Layout::new::<InternalNode<K, V>>()
+ } else {
+ Layout::new::<LeafNode<K, V>>()
+ },
+ );
+ }
+ ret
+ }
+}
+
+impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Temporarily takes out another mutable reference to the same node. Beware, as
+ /// this method is very dangerous, doubly so since it might not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree, the returned
+ /// pointer can easily be used to make the original pointer dangling, out of
+ /// bounds, or invalid under stacked borrow rules.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef`
+ // that restricts the use of navigation methods on reborrowed pointers,
+ // preventing this unsafety.
+ unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+
+ /// Borrows exclusive access to the leaf portion of a leaf or internal node.
+ fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
+ let ptr = Self::as_leaf_ptr(self);
+ // SAFETY: we have exclusive access to the entire node.
+ unsafe { &mut *ptr }
+ }
+
+ /// Offers exclusive access to the leaf portion of a leaf or internal node.
+ fn into_leaf_mut(mut self) -> &'a mut LeafNode<K, V> {
+ let ptr = Self::as_leaf_ptr(&mut self);
+ // SAFETY: we have exclusive access to the entire node.
+ unsafe { &mut *ptr }
+ }
+}
+
+impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
+ /// Borrows exclusive access to the leaf portion of a dying leaf or internal node.
+ fn as_leaf_dying(&mut self) -> &mut LeafNode<K, V> {
+ let ptr = Self::as_leaf_ptr(self);
+ // SAFETY: we have exclusive access to the entire node.
+ unsafe { &mut *ptr }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Borrows exclusive access to an element of the key storage area.
+ ///
+ /// # Safety
+ /// `index` is in bounds of 0..CAPACITY
+ unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
+ where
+ I: SliceIndex<[MaybeUninit<K>], Output = Output>,
+ {
+ // SAFETY: the caller will not be able to call further methods on self
+ // until the key slice reference is dropped, as we have unique access
+ // for the lifetime of the borrow.
+ unsafe { self.as_leaf_mut().keys.as_mut_slice().get_unchecked_mut(index) }
+ }
+
+ /// Borrows exclusive access to an element or slice of the node's value storage area.
+ ///
+ /// # Safety
+ /// `index` is in bounds of 0..CAPACITY
+ unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
+ where
+ I: SliceIndex<[MaybeUninit<V>], Output = Output>,
+ {
+ // SAFETY: the caller will not be able to call further methods on self
+ // until the value slice reference is dropped, as we have unique access
+ // for the lifetime of the borrow.
+ unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Borrows exclusive access to an element or slice of the node's storage area for edge contents.
+ ///
+ /// # Safety
+ /// `index` is in bounds of 0..CAPACITY + 1
+ unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
+ where
+ I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
+ {
+ // SAFETY: the caller will not be able to call further methods on self
+ // until the edge slice reference is dropped, as we have unique access
+ // for the lifetime of the borrow.
+ unsafe { self.as_internal_mut().edges.as_mut_slice().get_unchecked_mut(index) }
+ }
+}
+
+impl<'a, K, V, Type> NodeRef<marker::ValMut<'a>, K, V, Type> {
+ /// # Safety
+ /// - The node has more than `idx` initialized elements.
+ unsafe fn into_key_val_mut_at(mut self, idx: usize) -> (&'a K, &'a mut V) {
+ // We only create a reference to the one element we are interested in,
+ // to avoid aliasing with outstanding references to other elements,
+ // in particular, those returned to the caller in earlier iterations.
+ let leaf = Self::as_leaf_ptr(&mut self);
+ let keys = unsafe { ptr::addr_of!((*leaf).keys) };
+ let vals = unsafe { ptr::addr_of_mut!((*leaf).vals) };
+ // We must coerce to unsized array pointers because of Rust issue #74679.
+ let keys: *const [_] = keys;
+ let vals: *mut [_] = vals;
+ let key = unsafe { (&*keys.get_unchecked(idx)).assume_init_ref() };
+ let val = unsafe { (&mut *vals.get_unchecked_mut(idx)).assume_init_mut() };
+ (key, val)
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Borrows exclusive access to the length of the node.
+ pub fn len_mut(&mut self) -> &mut u16 {
+ &mut self.as_leaf_mut().len
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// # Safety
+ /// Every item returned by `range` is a valid edge index for the node.
+ unsafe fn correct_childrens_parent_links<R: Iterator<Item = usize>>(&mut self, range: R) {
+ for i in range {
+ debug_assert!(i <= self.len());
+ unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
+ }
+ }
+
+ fn correct_all_childrens_parent_links(&mut self) {
+ let len = self.len();
+ unsafe { self.correct_childrens_parent_links(0..=len) };
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Sets the node's link to its parent edge,
+ /// without invalidating other references to the node.
+ fn set_parent_link(&mut self, parent: NonNull<InternalNode<K, V>>, parent_idx: usize) {
+ let leaf = Self::as_leaf_ptr(self);
+ unsafe { (*leaf).parent = Some(parent) };
+ unsafe { (*leaf).parent_idx.write(parent_idx as u16) };
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+ /// Clears the root's link to its parent edge.
+ fn clear_parent_link(&mut self) {
+ let mut root_node = self.borrow_mut();
+ let leaf = root_node.as_leaf_mut();
+ leaf.parent = None;
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+ /// Returns a new owned tree, with its own root node that is initially empty.
+ pub fn new<A: Allocator + Clone>(alloc: A) -> Self {
+ NodeRef::new_leaf(alloc).forget_type()
+ }
+
+ /// Adds a new internal node with a single edge pointing to the previous root node,
+ /// makes that new node the root node, and returns it. This increases the height by 1
+ /// and is the opposite of `pop_internal_level`.
+ pub fn push_internal_level<A: Allocator + Clone>(
+ &mut self,
+ alloc: A,
+ ) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
+ super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());
+
+ // `self.borrow_mut()`, except that we just forgot we're internal now:
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+
+ /// Removes the internal root node, using its first child as the new root node.
+ /// As it is intended only to be called when the root node has only one child,
+ /// no cleanup is done on any of the keys, values and other children.
+ /// This decreases the height by 1 and is the opposite of `push_internal_level`.
+ ///
+ /// Requires exclusive access to the `NodeRef` object but not to the root node;
+ /// it will not invalidate other handles or references to the root node.
+ ///
+ /// Panics if there is no internal level, i.e., if the root node is a leaf.
+ pub fn pop_internal_level<A: Allocator + Clone>(&mut self, alloc: A) {
+ assert!(self.height > 0);
+
+ let top = self.node;
+
+ // SAFETY: we asserted to be internal.
+ let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() };
+ // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive.
+ let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) };
+ // SAFETY: the first edge is always initialized.
+ self.node = unsafe { internal_node.edges[0].assume_init_read() };
+ self.height -= 1;
+ self.clear_parent_link();
+
+ unsafe {
+ alloc.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
+ }
+ }
+}
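+
+// A sketch (illustrative, not from the original code) of how the two level
+// operations above pair up on an owned root, assuming some `root: Root<K, V>`
+// and a suitable `alloc: A` where `A: Allocator + Clone`:
+//
+//     let h = root.height();                    // e.g. 1
+//     root.push_internal_level(alloc.clone());  // height is now h + 1;
+//                                               // the old root is child 0
+//     root.pop_internal_level(alloc);           // height is back to h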
+
+impl<K, V, Type> NodeRef<marker::Owned, K, V, Type> {
+ /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe
+ /// because the return value cannot be used to destroy the root, and there
+ /// cannot be other references to the tree.
+ pub fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+
+ /// Mutably borrows the owned root node with the limited `ValMut` borrow type,
+ /// which allows mutating the values but not the keys or the tree structure.
+ pub fn borrow_valmut(&mut self) -> NodeRef<marker::ValMut<'_>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+
+ /// Irreversibly transitions to a reference that permits traversal and offers
+ /// destructive methods and little else.
+ pub fn into_dying(self) -> NodeRef<marker::Dying, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ /// Adds a key-value pair to the end of the node, and returns
+ /// a mutable reference to the inserted value.
+ pub fn push(&mut self, key: K, val: V) -> &mut V {
+ let len = self.len_mut();
+ let idx = usize::from(*len);
+ assert!(idx < CAPACITY);
+ *len += 1;
+ unsafe {
+ self.key_area_mut(idx).write(key);
+ self.val_area_mut(idx).write(val)
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Adds a key-value pair, and an edge to go to the right of that pair,
+ /// to the end of the node.
+ pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
+ assert!(edge.height == self.height - 1);
+
+ let len = self.len_mut();
+ let idx = usize::from(*len);
+ assert!(idx < CAPACITY);
+ *len += 1;
+ unsafe {
+ self.key_area_mut(idx).write(key);
+ self.val_area_mut(idx).write(val);
+ self.edge_area_mut(idx + 1).write(edge.node);
+ Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
+ }
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
+ /// Removes any static information asserting that this node is a `Leaf` node.
+ pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ /// Removes any static information asserting that this node is an `Internal` node.
+ pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Checks whether a node is an `Internal` node or a `Leaf` node.
+ pub fn force(
+ self,
+ ) -> ForceResult<
+ NodeRef<BorrowType, K, V, marker::Leaf>,
+ NodeRef<BorrowType, K, V, marker::Internal>,
+ > {
+ if self.height == 0 {
+ ForceResult::Leaf(NodeRef {
+ height: self.height,
+ node: self.node,
+ _marker: PhantomData,
+ })
+ } else {
+ ForceResult::Internal(NodeRef {
+ height: self.height,
+ node: self.node,
+ _marker: PhantomData,
+ })
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Unsafely asserts to the compiler the static information that this node is a `Leaf`.
+ unsafe fn cast_to_leaf_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ debug_assert!(self.height == 0);
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+
+ /// Unsafely asserts to the compiler the static information that this node is an `Internal`.
+ unsafe fn cast_to_internal_unchecked(self) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ debug_assert!(self.height > 0);
+ NodeRef { height: self.height, node: self.node, _marker: PhantomData }
+ }
+}
+
+/// A reference to a specific key-value pair or edge within a node. The `Node` parameter
+/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key-value
+/// pair) or `Edge` (signifying a handle on an edge).
+///
+/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
+/// a child node, these represent the spaces where child pointers would go between the key-value
+/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
+/// to the left of the node, one between the two pairs, and one at the right of the node.
+pub struct Handle<Node, Type> {
+ node: Node,
+ idx: usize,
+ _marker: PhantomData<Type>,
+}
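+
+// For example (illustrative only), a node of length 2 interleaves its edges
+// and key-value pairs as:
+//
+//     edge 0 | kv 0 | edge 1 | kv 1 | edge 2
+//
+// which is why `Handle::new_kv` requires `idx < node.len()` while
+// `Handle::new_edge` allows `idx <= node.len()`.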
+
+impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
+// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
+// `Clone`able is when it is an immutable reference and therefore `Copy`.
+impl<Node: Copy, Type> Clone for Handle<Node, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<Node, Type> Handle<Node, Type> {
+ /// Retrieves the node that contains the edge or key-value pair this handle points to.
+ pub fn into_node(self) -> Node {
+ self.node
+ }
+
+ /// Returns the position of this handle in the node.
+ pub fn idx(&self) -> usize {
+ self.idx
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+ /// Creates a new handle to a key-value pair in `node`.
+ /// Unsafe because the caller must ensure that `idx < node.len()`.
+ pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ debug_assert!(idx < node.len());
+
+ Handle { node, idx, _marker: PhantomData }
+ }
+
+ pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node, self.idx) }
+ }
+
+ pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node, self.idx + 1) }
+ }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ fn eq(&self, other: &Self) -> bool {
+ let Self { node, idx, _marker } = self;
+ node.eq(&other.node) && *idx == other.idx
+ }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ /// Temporarily takes out another immutable handle on the same location.
+ pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
+ }
+}
+
+impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+ /// Temporarily takes out another mutable handle on the same location. Beware, as
+ /// this method is very dangerous, doubly so since it might not immediately appear
+ /// dangerous.
+ ///
+ /// For details, see `NodeRef::reborrow_mut`.
+ pub unsafe fn reborrow_mut(
+ &mut self,
+ ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ /// Creates a new handle to an edge in `node`.
+ /// Unsafe because the caller must ensure that `idx <= node.len()`.
+ pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ debug_assert!(idx <= node.len());
+
+ Handle { node, idx, _marker: PhantomData }
+ }
+
+ pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+ if self.idx > 0 {
+ Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
+ } else {
+ Err(self)
+ }
+ }
+
+ pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+ if self.idx < self.node.len() {
+ Ok(unsafe { Handle::new_kv(self.node, self.idx) })
+ } else {
+ Err(self)
+ }
+ }
+}
+
+pub enum LeftOrRight<T> {
+ Left(T),
+ Right(T),
+}
+
+/// Given an edge index where we want to insert into a node filled to capacity,
+/// computes a sensible KV index of a split point and where to perform the insertion.
+/// The goal of the split point is for its key and value to end up in a parent node;
+/// the keys, values and edges to the left of the split point become the left child;
+/// the keys, values and edges to the right of the split point become the right child.
+fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight<usize>) {
+ debug_assert!(edge_idx <= CAPACITY);
+ // Rust issue #74834 tries to explain these symmetric rules.
+ match edge_idx {
+ 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)),
+ EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)),
+ EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)),
+ _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))),
+ }
+}
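+
+// Worked example of `splitpoint` (assuming the usual constants defined earlier
+// in this file: B = 6, hence CAPACITY = 11 and KV_IDX_CENTER = 5):
+//
+//     splitpoint(0..=4)  == (4, Left(edge_idx))       // insert into left child
+//     splitpoint(5)      == (5, Left(5))              // rightmost left edge
+//     splitpoint(6)      == (5, Right(0))             // leftmost right edge
+//     splitpoint(7..=11) == (6, Right(edge_idx - 7))  // insert into right child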
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key-value pair between the key-value pairs to the right and left of
+ /// this edge. This method assumes that there is enough space in the node for the new
+ /// pair to fit.
+ ///
+ /// The returned pointer points to the inserted value.
+ fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+ debug_assert!(self.node.len() < CAPACITY);
+ let new_len = self.node.len() + 1;
+
+ unsafe {
+ slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
+ slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
+ *self.node.len_mut() = new_len as u16;
+
+ self.node.val_area_mut(self.idx).assume_init_mut()
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key-value pair between the key-value pairs to the right and left of
+ /// this edge. This method splits the node if there isn't enough room.
+ ///
+ /// The returned pointer points to the inserted value.
+ fn insert<A: Allocator + Clone>(
+ mut self,
+ key: K,
+ val: V,
+ alloc: A,
+ ) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
+ if self.node.len() < CAPACITY {
+ let val_ptr = self.insert_fit(key, val);
+ (None, val_ptr)
+ } else {
+ let (middle_kv_idx, insertion) = splitpoint(self.idx);
+ let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
+ let mut result = middle.split(alloc);
+ let mut insertion_edge = match insertion {
+ LeftOrRight::Left(insert_idx) => unsafe {
+ Handle::new_edge(result.left.reborrow_mut(), insert_idx)
+ },
+ LeftOrRight::Right(insert_idx) => unsafe {
+ Handle::new_edge(result.right.borrow_mut(), insert_idx)
+ },
+ };
+ let val_ptr = insertion_edge.insert_fit(key, val);
+ (Some(result), val_ptr)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ /// Fixes the parent pointer and index in the child node that this edge
+ /// links to. This is useful when the ordering of edges has been changed.
+ fn correct_parent_link(self) {
+ // Create backpointer without invalidating other references to the node.
+ let ptr = unsafe { NonNull::new_unchecked(NodeRef::as_internal_ptr(&self.node)) };
+ let idx = self.idx;
+ let mut child = self.descend();
+ child.set_parent_link(ptr, idx);
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ /// Inserts a new key-value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key-value pair to the right of this edge. This method assumes
+ /// that there is enough space in the node for the new pair to fit.
+ fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(edge.height == self.node.height - 1);
+ let new_len = self.node.len() + 1;
+
+ unsafe {
+ slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
+ slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
+ slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
+ *self.node.len_mut() = new_len as u16;
+
+ self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
+ }
+ }
+
+ /// Inserts a new key-value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key-value pair to the right of this edge. This method splits
+ /// the node if there isn't enough room.
+ fn insert<A: Allocator + Clone>(
+ mut self,
+ key: K,
+ val: V,
+ edge: Root<K, V>,
+ alloc: A,
+ ) -> Option<SplitResult<'a, K, V, marker::Internal>> {
+ assert!(edge.height == self.node.height - 1);
+
+ if self.node.len() < CAPACITY {
+ self.insert_fit(key, val, edge);
+ None
+ } else {
+ let (middle_kv_idx, insertion) = splitpoint(self.idx);
+ let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
+ let mut result = middle.split(alloc);
+ let mut insertion_edge = match insertion {
+ LeftOrRight::Left(insert_idx) => unsafe {
+ Handle::new_edge(result.left.reborrow_mut(), insert_idx)
+ },
+ LeftOrRight::Right(insert_idx) => unsafe {
+ Handle::new_edge(result.right.borrow_mut(), insert_idx)
+ },
+ };
+ insertion_edge.insert_fit(key, val, edge);
+ Some(result)
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key-value pair between the key-value pairs to the right and left of
+ /// this edge. This method splits the node if there isn't enough room, and tries to
+ /// insert the split off portion into the parent node recursively, until the root is reached.
+ ///
+ /// If the returned result is some `SplitResult`, the `left` field will be the root node.
+ /// The returned pointer points to the inserted value, which in the case of `SplitResult`
+ /// is in the `left` or `right` tree.
+ pub fn insert_recursing<A: Allocator + Clone>(
+ self,
+ key: K,
+ value: V,
+ alloc: A,
+ ) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
+ let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) {
+ (None, val_ptr) => return (None, val_ptr),
+ (Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
+ };
+
+ loop {
+ split = match split.left.ascend() {
+ Ok(parent) => {
+ match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
+ None => return (None, val_ptr),
+ Some(split) => split.forget_node_type(),
+ }
+ }
+ Err(root) => return (Some(SplitResult { left: root, ..split }), val_ptr),
+ };
+ }
+ }
+}
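+
+// Sketch of the loop above (illustrative): a split in a full leaf bubbles up.
+// Each iteration hands `split.kv` and `split.right` to the parent; if the
+// parent is also full it splits in turn. If the old root itself splits, the
+// caller receives `Some(SplitResult)` whose `left` is the root, and (e.g. in
+// the map's insertion code) is expected to grow the tree with
+// `push_internal_level` before pushing the split-off KV and right node.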
+
+impl<BorrowType: marker::BorrowType, K, V>
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>
+{
+ /// Finds the node pointed to by this edge.
+ ///
+ /// The method name assumes you picture trees with the root node on top.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ assert!(BorrowType::PERMITS_TRAVERSAL);
+ // We need to use raw pointers to nodes because, if BorrowType is
+ // marker::ValMut, there might be outstanding mutable references to
+ // values that we must not invalidate. There's no worry accessing the
+ // height field because that value is copied. Beware that, once the
+ // node pointer is dereferenced, we access the edges array with a
+ // reference (Rust issue #73987) and invalidate any other references
+ // to or inside the array, should any be around.
+ let parent_ptr = NodeRef::as_internal_ptr(&self.node);
+ let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() };
+ NodeRef { node, height: self.node.height - 1, _marker: PhantomData }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn into_kv(self) -> (&'a K, &'a V) {
+ debug_assert!(self.idx < self.node.len());
+ let leaf = self.node.into_leaf();
+ let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
+ let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() };
+ (k, v)
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn key_mut(&mut self) -> &mut K {
+ unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
+ }
+
+ pub fn into_val_mut(self) -> &'a mut V {
+ debug_assert!(self.idx < self.node.len());
+ let leaf = self.node.into_leaf_mut();
+ unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
+ }
+}
+
+impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
+ unsafe { self.node.into_key_val_mut_at(self.idx) }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
+ debug_assert!(self.idx < self.node.len());
+ // We cannot call separate key and value methods, because calling the second one
+ // invalidates the reference returned by the first.
+ unsafe {
+ let leaf = self.node.as_leaf_mut();
+ let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut();
+ let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut();
+ (key, val)
+ }
+ }
+
+ /// Replaces the key and value that the KV handle refers to.
+ pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) {
+ let (key, val) = self.kv_mut();
+ (mem::replace(key, k), mem::replace(val, v))
+ }
+}
+
+impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
+ /// Extracts the key and value that the KV handle refers to.
+ /// # Safety
+ /// The node that the handle refers to must not yet have been deallocated.
+ pub unsafe fn into_key_val(mut self) -> (K, V) {
+ debug_assert!(self.idx < self.node.len());
+ let leaf = self.node.as_leaf_dying();
+ unsafe {
+ let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_read();
+ let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_read();
+ (key, val)
+ }
+ }
+
+ /// Drops the key and value that the KV handle refers to.
+ /// # Safety
+ /// The node that the handle refers to must not yet have been deallocated.
+ #[inline]
+ pub unsafe fn drop_key_val(mut self) {
+ debug_assert!(self.idx < self.node.len());
+ let leaf = self.node.as_leaf_dying();
+ unsafe {
+ leaf.keys.get_unchecked_mut(self.idx).assume_init_drop();
+ leaf.vals.get_unchecked_mut(self.idx).assume_init_drop();
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ /// Helps implementations of `split` for a particular `NodeType`,
+ /// by taking care of leaf data.
+ fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
+ debug_assert!(self.idx < self.node.len());
+ let old_len = self.node.len();
+ let new_len = old_len - self.idx - 1;
+ new_node.len = new_len as u16;
+ unsafe {
+ let k = self.node.key_area_mut(self.idx).assume_init_read();
+ let v = self.node.val_area_mut(self.idx).assume_init_read();
+
+ move_to_slice(
+ self.node.key_area_mut(self.idx + 1..old_len),
+ &mut new_node.keys[..new_len],
+ );
+ move_to_slice(
+ self.node.val_area_mut(self.idx + 1..old_len),
+ &mut new_node.vals[..new_len],
+ );
+
+ *self.node.len_mut() = self.idx as u16;
+ (k, v)
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the key-value pairs to the left of
+ /// this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the key-value pairs to the right of this handle are put into a newly
+ /// allocated node.
+ pub fn split<A: Allocator + Clone>(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> {
+ let mut new_node = LeafNode::new(alloc);
+
+ let kv = self.split_leaf_data(&mut new_node);
+
+ let right = NodeRef::from_new_leaf(new_node);
+ SplitResult { left: self.node, kv, right }
+ }
+
+ /// Removes the key-value pair pointed to by this handle and returns it, along with the edge
+ /// that the key-value pair collapsed into.
+ pub fn remove(
+ mut self,
+ ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
+ let old_len = self.node.len();
+ unsafe {
+ let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
+ let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
+ *self.node.len_mut() = (old_len - 1) as u16;
+ ((k, v), self.left_edge())
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the edges and key-value pairs to the
+ /// left of this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the edges and key-value pairs to the right of this handle are put into
+ /// a newly allocated node.
+ pub fn split<A: Allocator + Clone>(
+ mut self,
+ alloc: A,
+ ) -> SplitResult<'a, K, V, marker::Internal> {
+ let old_len = self.node.len();
+ unsafe {
+ let mut new_node = InternalNode::new(alloc);
+ let kv = self.split_leaf_data(&mut new_node.data);
+ let new_len = usize::from(new_node.data.len);
+ move_to_slice(
+ self.node.edge_area_mut(self.idx + 1..old_len + 1),
+ &mut new_node.edges[..new_len + 1],
+ );
+
+ let height = self.node.height;
+ let right = NodeRef::from_new_internal(new_node, height);
+
+ SplitResult { left: self.node, kv, right }
+ }
+ }
+}
+
+/// Represents a session for evaluating and performing a balancing operation
+/// around an internal key-value pair.
+pub struct BalancingContext<'a, K, V> {
+ parent: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV>,
+ left_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+ right_child: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ pub fn consider_for_balancing(self) -> BalancingContext<'a, K, V> {
+ let self1 = unsafe { ptr::read(&self) };
+ let self2 = unsafe { ptr::read(&self) };
+ BalancingContext {
+ parent: self,
+ left_child: self1.left_edge().descend(),
+ right_child: self2.right_edge().descend(),
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Chooses a balancing context involving the node as a child, thus between
+ /// the KV immediately to the left or to the right in the parent node.
+ /// Returns an `Err` if there is no parent.
+ /// Panics if the parent is empty.
+ ///
+ /// Prefers the left side, to be optimal if the given node is somehow
+ /// underfull, meaning here only that it has fewer elements than its left
+ /// sibling and than its right sibling, if they exist. In that case,
+ /// merging with the left sibling is faster, since we only need to move
+ /// the node's N elements, instead of shifting them to the right and moving
+ /// more than N elements in front. Stealing from the left sibling is also
+ /// typically faster, since we only need to shift the node's N elements to
+ /// the right, instead of shifting at least N of the sibling's elements to
+ /// the left.
+ pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
+ match unsafe { ptr::read(&self) }.ascend() {
+ Ok(parent_edge) => match parent_edge.left_kv() {
+ Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
+ parent: unsafe { ptr::read(&left_parent_kv) },
+ left_child: left_parent_kv.left_edge().descend(),
+ right_child: self,
+ })),
+ Err(parent_edge) => match parent_edge.right_kv() {
+ Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
+ parent: unsafe { ptr::read(&right_parent_kv) },
+ left_child: self,
+ right_child: right_parent_kv.right_edge().descend(),
+ })),
+ Err(_) => unreachable!("empty internal node"),
+ },
+ },
+ Err(root) => Err(root),
+ }
+ }
+}
+
+impl<'a, K, V> BalancingContext<'a, K, V> {
+ pub fn left_child_len(&self) -> usize {
+ self.left_child.len()
+ }
+
+ pub fn right_child_len(&self) -> usize {
+ self.right_child.len()
+ }
+
+ pub fn into_left_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ self.left_child
+ }
+
+ pub fn into_right_child(self) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ self.right_child
+ }
+
+ /// Returns whether merging is possible, i.e., whether there is enough room
+ /// in a node to combine the central KV with both adjacent child nodes.
+ pub fn can_merge(&self) -> bool {
+ self.left_child.len() + 1 + self.right_child.len() <= CAPACITY
+ }
+}
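+
+// Example (assuming CAPACITY = 11): two children of length 5 plus the parent
+// KV merge into one node of length 5 + 1 + 5 == 11, exactly at capacity.
+// With siblings of lengths 7 and 5 the sum is 13 > 11, so merging is
+// impossible and the caller steals instead.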
+
+impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
+ /// Performs a merge and lets a closure decide what to return.
+ fn do_merge<
+ F: FnOnce(
+ NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
+ NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+ ) -> R,
+ R,
+ A: Allocator,
+ >(
+ self,
+ result: F,
+ alloc: A,
+ ) -> R {
+ let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
+ let old_parent_len = parent_node.len();
+ let mut left_node = self.left_child;
+ let old_left_len = left_node.len();
+ let mut right_node = self.right_child;
+ let right_len = right_node.len();
+ let new_left_len = old_left_len + 1 + right_len;
+
+ assert!(new_left_len <= CAPACITY);
+
+ unsafe {
+ *left_node.len_mut() = new_left_len as u16;
+
+ let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
+ left_node.key_area_mut(old_left_len).write(parent_key);
+ move_to_slice(
+ right_node.key_area_mut(..right_len),
+ left_node.key_area_mut(old_left_len + 1..new_left_len),
+ );
+
+ let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
+ left_node.val_area_mut(old_left_len).write(parent_val);
+ move_to_slice(
+ right_node.val_area_mut(..right_len),
+ left_node.val_area_mut(old_left_len + 1..new_left_len),
+ );
+
+ slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
+ parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
+ *parent_node.len_mut() -= 1;
+
+ if parent_node.height > 1 {
+ // SAFETY: the height of the nodes being merged is one below the height
+ // of the node of this edge, thus above zero, so they are internal.
+ let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
+ let mut right_node = right_node.cast_to_internal_unchecked();
+ move_to_slice(
+ right_node.edge_area_mut(..right_len + 1),
+ left_node.edge_area_mut(old_left_len + 1..new_left_len + 1),
+ );
+
+ left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
+
+ alloc.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
+ } else {
+ alloc.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
+ }
+ }
+ result(parent_node, left_node)
+ }
+
+ /// Merges the parent's key-value pair and both adjacent child nodes into
+ /// the left child node and returns the shrunk parent node.
+ ///
+ /// Panics unless we `.can_merge()`.
+ pub fn merge_tracking_parent<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ self.do_merge(|parent, _child| parent, alloc)
+ }
+
+ /// Merges the parent's key-value pair and both adjacent child nodes into
+ /// the left child node and returns that child node.
+ ///
+ /// Panics unless we `.can_merge()`.
+ pub fn merge_tracking_child<A: Allocator + Clone>(
+ self,
+ alloc: A,
+ ) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ self.do_merge(|_parent, child| child, alloc)
+ }
+
+ /// Merges the parent's key-value pair and both adjacent child nodes into
+ /// the left child node and returns the edge handle in that child node
+ /// where the tracked child edge ended up.
+ ///
+ /// Panics unless we `.can_merge()`.
+ pub fn merge_tracking_child_edge<A: Allocator + Clone>(
+ self,
+ track_edge_idx: LeftOrRight<usize>,
+ alloc: A,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+ let old_left_len = self.left_child.len();
+ let right_len = self.right_child.len();
+ assert!(match track_edge_idx {
+ LeftOrRight::Left(idx) => idx <= old_left_len,
+ LeftOrRight::Right(idx) => idx <= right_len,
+ });
+ let child = self.merge_tracking_child(alloc);
+ let new_idx = match track_edge_idx {
+ LeftOrRight::Left(idx) => idx,
+ LeftOrRight::Right(idx) => old_left_len + 1 + idx,
+ };
+ unsafe { Handle::new_edge(child, new_idx) }
+ }
+
+ /// Removes a key-value pair from the left child and places it in the key-value storage
+ /// of the parent, while pushing the old parent key-value pair into the right child.
+ /// Returns a handle to the edge in the right child corresponding to where the original
+ /// edge specified by `track_right_edge_idx` ended up.
+ pub fn steal_left(
+ mut self,
+ track_right_edge_idx: usize,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+ self.bulk_steal_left(1);
+ unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) }
+ }
+
+ /// Removes a key-value pair from the right child and places it in the key-value storage
+ /// of the parent, while pushing the old parent key-value pair onto the left child.
+ /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`,
+ /// which didn't move.
+ pub fn steal_right(
+ mut self,
+ track_left_edge_idx: usize,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+ self.bulk_steal_right(1);
+ unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) }
+ }
+
+ /// This does stealing similar to `steal_left` but steals multiple elements at once.
+ pub fn bulk_steal_left(&mut self, count: usize) {
+ assert!(count > 0);
+ unsafe {
+ let left_node = &mut self.left_child;
+ let old_left_len = left_node.len();
+ let right_node = &mut self.right_child;
+ let old_right_len = right_node.len();
+
+ // Make sure that we may steal safely.
+ assert!(old_right_len + count <= CAPACITY);
+ assert!(old_left_len >= count);
+
+ let new_left_len = old_left_len - count;
+ let new_right_len = old_right_len + count;
+ *left_node.len_mut() = new_left_len as u16;
+ *right_node.len_mut() = new_right_len as u16;
+
+ // Move leaf data.
+ {
+ // Make room for stolen elements in the right child.
+ slice_shr(right_node.key_area_mut(..new_right_len), count);
+ slice_shr(right_node.val_area_mut(..new_right_len), count);
+
+ // Move elements from the left child to the right one.
+ move_to_slice(
+ left_node.key_area_mut(new_left_len + 1..old_left_len),
+ right_node.key_area_mut(..count - 1),
+ );
+ move_to_slice(
+ left_node.val_area_mut(new_left_len + 1..old_left_len),
+ right_node.val_area_mut(..count - 1),
+ );
+
+ // Move the left-most stolen pair to the parent.
+ let k = left_node.key_area_mut(new_left_len).assume_init_read();
+ let v = left_node.val_area_mut(new_left_len).assume_init_read();
+ let (k, v) = self.parent.replace_kv(k, v);
+
+ // Move parent's key-value pair to the right child.
+ right_node.key_area_mut(count - 1).write(k);
+ right_node.val_area_mut(count - 1).write(v);
+ }
+
+ match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
+ (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
+ // Make room for stolen edges.
+ slice_shr(right.edge_area_mut(..new_right_len + 1), count);
+
+ // Steal edges.
+ move_to_slice(
+ left.edge_area_mut(new_left_len + 1..old_left_len + 1),
+ right.edge_area_mut(..count),
+ );
+
+ right.correct_childrens_parent_links(0..new_right_len + 1);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => unreachable!(),
+ }
+ }
+ }
+
+ /// Mirror image of `bulk_steal_left`, stealing from the right child instead.
+ pub fn bulk_steal_right(&mut self, count: usize) {
+ assert!(count > 0);
+ unsafe {
+ let left_node = &mut self.left_child;
+ let old_left_len = left_node.len();
+ let right_node = &mut self.right_child;
+ let old_right_len = right_node.len();
+
+ // Make sure that we may steal safely.
+ assert!(old_left_len + count <= CAPACITY);
+ assert!(old_right_len >= count);
+
+ let new_left_len = old_left_len + count;
+ let new_right_len = old_right_len - count;
+ *left_node.len_mut() = new_left_len as u16;
+ *right_node.len_mut() = new_right_len as u16;
+
+ // Move leaf data.
+ {
+ // Move the right-most stolen pair to the parent.
+ let k = right_node.key_area_mut(count - 1).assume_init_read();
+ let v = right_node.val_area_mut(count - 1).assume_init_read();
+ let (k, v) = self.parent.replace_kv(k, v);
+
+ // Move parent's key-value pair to the left child.
+ left_node.key_area_mut(old_left_len).write(k);
+ left_node.val_area_mut(old_left_len).write(v);
+
+ // Move elements from the right child to the left one.
+ move_to_slice(
+ right_node.key_area_mut(..count - 1),
+ left_node.key_area_mut(old_left_len + 1..new_left_len),
+ );
+ move_to_slice(
+ right_node.val_area_mut(..count - 1),
+ left_node.val_area_mut(old_left_len + 1..new_left_len),
+ );
+
+ // Fill gap where stolen elements used to be.
+ slice_shl(right_node.key_area_mut(..old_right_len), count);
+ slice_shl(right_node.val_area_mut(..old_right_len), count);
+ }
+
+ match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
+ (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
+ // Steal edges.
+ move_to_slice(
+ right.edge_area_mut(..count),
+ left.edge_area_mut(old_left_len + 1..new_left_len + 1),
+ );
+
+ // Fill gap where stolen edges used to be.
+ slice_shl(right.edge_area_mut(..old_right_len + 1), count);
+
+ left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1);
+ right.correct_childrens_parent_links(0..new_right_len + 1);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => unreachable!(),
+ }
+ }
+ }
+}
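+
+// Illustration of `bulk_steal_left(2)` on leaf siblings (keys only, toy sizes):
+//
+//     before:  parent kv P,  left [a b c d e],  right [x y]
+//     after:   parent kv d,  left [a b c],      right [e P x y]
+//
+// `d`, the leftmost stolen element, replaces the parent KV, which in turn is
+// pushed down in front of the right child's old contents.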
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ pub fn forget_node_type(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+ pub fn forget_node_type(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
+ pub fn forget_node_type(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
+ unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
+ }
+}
+
+impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, Type> {
+ /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
+ pub fn force(
+ self,
+ ) -> ForceResult<
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, Type>,
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, Type>,
+ > {
+ match self.node.force() {
+ ForceResult::Leaf(node) => {
+ ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
+ }
+ ForceResult::Internal(node) => {
+ ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
+ }
+ }
+ }
+}
+
+impl<'a, K, V, Type> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, Type> {
+ /// Unsafely asserts to the compiler the static information that the handle's node is a `Leaf`.
+ pub unsafe fn cast_to_leaf_unchecked(
+ self,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, Type> {
+ let node = unsafe { self.node.cast_to_leaf_unchecked() };
+ Handle { node, idx: self.idx, _marker: PhantomData }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+ /// Moves the suffix after `self` from one node to another. `right` must be empty.
+ /// The first edge of `right` remains unchanged.
+ pub fn move_suffix(
+ &mut self,
+ right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+ ) {
+ unsafe {
+ let new_left_len = self.idx;
+ let mut left_node = self.reborrow_mut().into_node();
+ let old_left_len = left_node.len();
+
+ let new_right_len = old_left_len - new_left_len;
+ let mut right_node = right.reborrow_mut();
+
+ assert!(right_node.len() == 0);
+ assert!(left_node.height == right_node.height);
+
+ if new_right_len > 0 {
+ *left_node.len_mut() = new_left_len as u16;
+ *right_node.len_mut() = new_right_len as u16;
+
+ move_to_slice(
+ left_node.key_area_mut(new_left_len..old_left_len),
+ right_node.key_area_mut(..new_right_len),
+ );
+ move_to_slice(
+ left_node.val_area_mut(new_left_len..old_left_len),
+ right_node.val_area_mut(..new_right_len),
+ );
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => {
+ move_to_slice(
+ left.edge_area_mut(new_left_len + 1..old_left_len + 1),
+ right.edge_area_mut(1..new_right_len + 1),
+ );
+ right.correct_childrens_parent_links(1..new_right_len + 1);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => unreachable!(),
+ }
+ }
+ }
+ }
+}
+
+pub enum ForceResult<Leaf, Internal> {
+ Leaf(Leaf),
+ Internal(Internal),
+}
+
+/// Result of insertion, when a node needed to expand beyond its capacity.
+pub struct SplitResult<'a, K, V, NodeType> {
+ // Altered node in existing tree with elements and edges that belong to the left of `kv`.
+ pub left: NodeRef<marker::Mut<'a>, K, V, NodeType>,
+ // Some key and value that existed before and were split off, to be inserted elsewhere.
+ pub kv: (K, V),
+ // Owned, unattached, new node with elements and edges that belong to the right of `kv`.
+ pub right: NodeRef<marker::Owned, K, V, NodeType>,
+}
+
+impl<'a, K, V> SplitResult<'a, K, V, marker::Leaf> {
+ pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
+ SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
+ }
+}
+
+impl<'a, K, V> SplitResult<'a, K, V, marker::Internal> {
+ pub fn forget_node_type(self) -> SplitResult<'a, K, V, marker::LeafOrInternal> {
+ SplitResult { left: self.left.forget_type(), kv: self.kv, right: self.right.forget_type() }
+ }
+}
+
+pub mod marker {
+ use core::marker::PhantomData;
+
+ pub enum Leaf {}
+ pub enum Internal {}
+ pub enum LeafOrInternal {}
+
+ pub enum Owned {}
+ pub enum Dying {}
+ pub struct Immut<'a>(PhantomData<&'a ()>);
+ pub struct Mut<'a>(PhantomData<&'a mut ()>);
+ pub struct ValMut<'a>(PhantomData<&'a mut ()>);
+
+ pub trait BorrowType {
+ // Whether node references of this borrow type allow traversing
+ // to other nodes in the tree.
+ const PERMITS_TRAVERSAL: bool = true;
+ }
+ impl BorrowType for Owned {
+ // Traversal isn't needed; it happens using the result of `borrow_mut`.
+ // By disabling traversal, and only creating new references to roots,
+ // we know that every reference of the `Owned` type is to a root node.
+ const PERMITS_TRAVERSAL: bool = false;
+ }
+ impl BorrowType for Dying {}
+ impl<'a> BorrowType for Immut<'a> {}
+ impl<'a> BorrowType for Mut<'a> {}
+ impl<'a> BorrowType for ValMut<'a> {}
+
+ pub enum KV {}
+ pub enum Edge {}
+}
+
+/// Inserts a value into a slice of initialized elements followed by one uninitialized element.
+///
+/// # Safety
+/// The slice has more than `idx` elements.
+unsafe fn slice_insert<T>(slice: &mut [MaybeUninit<T>], idx: usize, val: T) {
+ unsafe {
+ let len = slice.len();
+ debug_assert!(len > idx);
+ let slice_ptr = slice.as_mut_ptr();
+ if len > idx + 1 {
+ ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1);
+ }
+ (*slice_ptr.add(idx)).write(val);
+ }
+}
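+
+// E.g. (illustrative) with `slice.len() == 4`: inserting `X` at index 1 of the
+// initialized prefix `[A, B, C]` shifts the tail right, yielding `[A, X, B, C]`.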
+
+/// Removes and returns a value from a slice of all initialized elements, leaving behind one
+/// trailing uninitialized element.
+///
+/// # Safety
+/// The slice has more than `idx` elements.
+unsafe fn slice_remove<T>(slice: &mut [MaybeUninit<T>], idx: usize) -> T {
+ unsafe {
+ let len = slice.len();
+ debug_assert!(idx < len);
+ let slice_ptr = slice.as_mut_ptr();
+ let ret = (*slice_ptr.add(idx)).assume_init_read();
+ ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1);
+ ret
+ }
+}
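+
+// E.g. (illustrative) with `slice.len() == 4`: removing index 1 from
+// `[A, B, C, D]` returns `B` and shifts the tail left, leaving `[A, C, D, _]`
+// with a trailing uninitialized slot.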
+
+/// Shifts the elements in a slice `distance` positions to the left.
+///
+/// # Safety
+/// The slice has at least `distance` elements.
+unsafe fn slice_shl<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
+ unsafe {
+ let slice_ptr = slice.as_mut_ptr();
+ ptr::copy(slice_ptr.add(distance), slice_ptr, slice.len() - distance);
+ }
+}
+
+/// Shifts the elements in a slice `distance` positions to the right.
+///
+/// # Safety
+/// The slice has at least `distance` elements.
+unsafe fn slice_shr<T>(slice: &mut [MaybeUninit<T>], distance: usize) {
+ unsafe {
+ let slice_ptr = slice.as_mut_ptr();
+ ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance);
+ }
+}
+
+/// Moves all values from a slice of initialized elements to a slice
+/// of uninitialized elements, leaving `src` behind as entirely uninitialized.
+/// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`.
+fn move_to_slice<T>(src: &mut [MaybeUninit<T>], dst: &mut [MaybeUninit<T>]) {
+ assert!(src.len() == dst.len());
+ unsafe {
+ ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
+ }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/library/alloc/src/collections/btree/node/tests.rs b/library/alloc/src/collections/btree/node/tests.rs
new file mode 100644
index 000000000..aadb0dc9c
--- /dev/null
+++ b/library/alloc/src/collections/btree/node/tests.rs
@@ -0,0 +1,102 @@
+use super::super::navigate;
+use super::*;
+use crate::alloc::Global;
+use crate::fmt::Debug;
+use crate::string::String;
+
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+ // Asserts that the back pointer in each reachable node points to its parent.
+ pub fn assert_back_pointers(self) {
+ if let ForceResult::Internal(node) = self.force() {
+ for idx in 0..=node.len() {
+ let edge = unsafe { Handle::new_edge(node, idx) };
+ let child = edge.descend();
+ assert!(child.ascend().ok() == Some(edge));
+ child.assert_back_pointers();
+ }
+ }
+ }
+
+ // Renders a multi-line display of the keys in order and in tree hierarchy,
+ // picturing the tree growing sideways from its root on the left to its
+ // leaves on the right.
+ pub fn dump_keys(self) -> String
+ where
+ K: Debug,
+ {
+ let mut result = String::new();
+ self.visit_nodes_in_order(|pos| match pos {
+ navigate::Position::Leaf(leaf) => {
+ let depth = self.height();
+ let indent = " ".repeat(depth);
+ result += &format!("\n{}{:?}", indent, leaf.keys());
+ }
+ navigate::Position::Internal(_) => {}
+ navigate::Position::InternalKV(kv) => {
+ let depth = self.height() - kv.into_node().height();
+ let indent = " ".repeat(depth);
+ result += &format!("\n{}{:?}", indent, kv.into_kv().0);
+ }
+ });
+ result
+ }
+}
+
+#[test]
+fn test_splitpoint() {
+ for idx in 0..=CAPACITY {
+ let (middle_kv_idx, insertion) = splitpoint(idx);
+
+ // Simulate performing the split:
+ let mut left_len = middle_kv_idx;
+ let mut right_len = CAPACITY - middle_kv_idx - 1;
+ match insertion {
+ LeftOrRight::Left(edge_idx) => {
+ assert!(edge_idx <= left_len);
+ left_len += 1;
+ }
+ LeftOrRight::Right(edge_idx) => {
+ assert!(edge_idx <= right_len);
+ right_len += 1;
+ }
+ }
+ assert!(left_len >= MIN_LEN_AFTER_SPLIT);
+ assert!(right_len >= MIN_LEN_AFTER_SPLIT);
+ assert!(left_len + right_len == CAPACITY);
+ }
+}
+
+#[test]
+fn test_partial_eq() {
+ let mut root1 = NodeRef::new_leaf(Global);
+ root1.borrow_mut().push(1, ());
+ let mut root1 = NodeRef::new_internal(root1.forget_type(), Global).forget_type();
+ let root2 = Root::new(Global);
+ root1.reborrow().assert_back_pointers();
+ root2.reborrow().assert_back_pointers();
+
+ let leaf_edge_1a = root1.reborrow().first_leaf_edge().forget_node_type();
+ let leaf_edge_1b = root1.reborrow().last_leaf_edge().forget_node_type();
+ let top_edge_1 = root1.reborrow().first_edge();
+ let top_edge_2 = root2.reborrow().first_edge();
+
+ assert!(leaf_edge_1a == leaf_edge_1a);
+ assert!(leaf_edge_1a != leaf_edge_1b);
+ assert!(leaf_edge_1a != top_edge_1);
+ assert!(leaf_edge_1a != top_edge_2);
+ assert!(top_edge_1 == top_edge_1);
+ assert!(top_edge_1 != top_edge_2);
+
+ root1.pop_internal_level(Global);
+ unsafe { root1.into_dying().deallocate_and_ascend(Global) };
+ unsafe { root2.into_dying().deallocate_and_ascend(Global) };
+}
+
+#[test]
+#[cfg(target_arch = "x86_64")]
+fn test_sizes() {
+ assert_eq!(core::mem::size_of::<LeafNode<(), ()>>(), 16);
+ assert_eq!(core::mem::size_of::<LeafNode<i64, i64>>(), 16 + CAPACITY * 2 * 8);
+ assert_eq!(core::mem::size_of::<InternalNode<(), ()>>(), 16 + (CAPACITY + 1) * 8);
+ assert_eq!(core::mem::size_of::<InternalNode<i64, i64>>(), 16 + (CAPACITY * 3 + 1) * 8);
+}
diff --git a/library/alloc/src/collections/btree/remove.rs b/library/alloc/src/collections/btree/remove.rs
new file mode 100644
index 000000000..090429925
--- /dev/null
+++ b/library/alloc/src/collections/btree/remove.rs
@@ -0,0 +1,95 @@
+use super::map::MIN_LEN;
+use super::node::{marker, ForceResult::*, Handle, LeftOrRight::*, NodeRef};
+use core::alloc::Allocator;
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
+ /// Removes a key-value pair from the tree, and returns that pair, as well as
+ /// the leaf edge corresponding to that former pair. It's possible this empties
+ /// a root node that is internal, which the caller should pop from the map
+ /// holding the tree. The caller should also decrement the map's length.
+ pub fn remove_kv_tracking<F: FnOnce(), A: Allocator + Clone>(
+ self,
+ handle_emptied_internal_root: F,
+ alloc: A,
+ ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
+ match self.force() {
+ Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root, alloc),
+ Internal(node) => node.remove_internal_kv(handle_emptied_internal_root, alloc),
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ fn remove_leaf_kv<F: FnOnce(), A: Allocator + Clone>(
+ self,
+ handle_emptied_internal_root: F,
+ alloc: A,
+ ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
+ let (old_kv, mut pos) = self.remove();
+ let len = pos.reborrow().into_node().len();
+ if len < MIN_LEN {
+ let idx = pos.idx();
+ // We have to temporarily forget the child type, because there is no
+ // distinct node type for the immediate parents of a leaf.
+ let new_pos = match pos.into_node().forget_type().choose_parent_kv() {
+ Ok(Left(left_parent_kv)) => {
+ debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1);
+ if left_parent_kv.can_merge() {
+ left_parent_kv.merge_tracking_child_edge(Right(idx), alloc.clone())
+ } else {
+ debug_assert!(left_parent_kv.left_child_len() > MIN_LEN);
+ left_parent_kv.steal_left(idx)
+ }
+ }
+ Ok(Right(right_parent_kv)) => {
+ debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1);
+ if right_parent_kv.can_merge() {
+ right_parent_kv.merge_tracking_child_edge(Left(idx), alloc.clone())
+ } else {
+ debug_assert!(right_parent_kv.right_child_len() > MIN_LEN);
+ right_parent_kv.steal_right(idx)
+ }
+ }
+ Err(pos) => unsafe { Handle::new_edge(pos, idx) },
+ };
+ // SAFETY: `new_pos` is the leaf we started from or a sibling.
+ pos = unsafe { new_pos.cast_to_leaf_unchecked() };
+
+ // The parent (if any) has shrunk only if we merged, but skipping
+ // the following step otherwise does not pay off in benchmarks.
+ //
+ // SAFETY: We won't destroy or rearrange the leaf where `pos` is at
+ // by handling its parent recursively; at worst we will destroy or
+ // rearrange the parent through the grandparent, thus change the
+ // link to the parent inside the leaf.
+ if let Ok(parent) = unsafe { pos.reborrow_mut() }.into_node().ascend() {
+ if !parent.into_node().forget_type().fix_node_and_affected_ancestors(alloc) {
+ handle_emptied_internal_root();
+ }
+ }
+ }
+ (old_kv, pos)
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ fn remove_internal_kv<F: FnOnce(), A: Allocator + Clone>(
+ self,
+ handle_emptied_internal_root: F,
+ alloc: A,
+ ) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
+ // Remove an adjacent KV from its leaf and then put it back in place of
+ // the element we were asked to remove. Prefer the left adjacent KV,
+ // for the reasons listed in `choose_parent_kv`.
+ let left_leaf_kv = self.left_edge().descend().last_leaf_edge().left_kv();
+ let left_leaf_kv = unsafe { left_leaf_kv.ok().unwrap_unchecked() };
+ let (left_kv, left_hole) = left_leaf_kv.remove_leaf_kv(handle_emptied_internal_root, alloc);
+
+ // The internal node may have been stolen from or merged. Go back right
+ // to find where the original KV ended up.
+ let mut internal = unsafe { left_hole.next_kv().ok().unwrap_unchecked() };
+ let old_kv = internal.replace_kv(left_kv.0, left_kv.1);
+ let pos = internal.next_leaf_edge();
+ (old_kv, pos)
+ }
+}
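+
+// Sketch of `remove_internal_kv` (illustrative, with toy node sizes that
+// ignore `MIN_LEN`): the removed internal KV is replaced by its in-order
+// predecessor, taken from the rightmost leaf edge of the left subtree.
+// E.g. removing 4:
+//
+//          [4]                     [3]
+//         /   \      becomes      /   \
+//     [1 2 3] [5 6]           [1 2]   [5 6]
+//
+// after which the shrunken leaf is fixed up if it became underfull.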
diff --git a/library/alloc/src/collections/btree/search.rs b/library/alloc/src/collections/btree/search.rs
new file mode 100644
index 000000000..ad3522b4e
--- /dev/null
+++ b/library/alloc/src/collections/btree/search.rs
@@ -0,0 +1,285 @@
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+use core::ops::{Bound, RangeBounds};
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+
+use SearchBound::*;
+use SearchResult::*;
+
+pub enum SearchBound<T> {
+ /// An inclusive bound to look for, just like `Bound::Included(T)`.
+ Included(T),
+ /// An exclusive bound to look for, just like `Bound::Excluded(T)`.
+ Excluded(T),
+ /// An unconditional inclusive bound, just like `Bound::Unbounded`.
+ AllIncluded,
+ /// An unconditional exclusive bound.
+ AllExcluded,
+}
+
+impl<T> SearchBound<T> {
+ pub fn from_range(range_bound: Bound<T>) -> Self {
+ match range_bound {
+ Bound::Included(t) => Included(t),
+ Bound::Excluded(t) => Excluded(t),
+ Bound::Unbounded => AllIncluded,
+ }
+ }
+}
+
+pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
+ Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
+ GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
+}
+
+pub enum IndexResult {
+ KV(usize),
+ Edge(usize),
+}
+
+impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Looks up a given key in a (sub)tree headed by the node, recursively.
+ /// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+ /// returns a `GoDown` with the handle of the leaf edge where the key belongs.
+ ///
+ /// The result is meaningful only if the tree is ordered by key, like the tree
+ /// in a `BTreeMap` is.
+ pub fn search_tree<Q: ?Sized>(
+ mut self,
+ key: &Q,
+ ) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ {
+ loop {
+ self = match self.search_node(key) {
+ Found(handle) => return Found(handle),
+ GoDown(handle) => match handle.force() {
+ Leaf(leaf) => return GoDown(leaf),
+ Internal(internal) => internal.descend(),
+ },
+ }
+ }
+ }
+
+ /// Descends to the nearest node where the edge matching the lower bound
+ /// of the range is different from the edge matching the upper bound, i.e.,
+ /// the nearest node that has at least one key contained in the range.
+ ///
+ /// If found, returns an `Ok` with that node, the strictly ascending pair of
+ /// edge indices in the node delimiting the range, and the corresponding
+ /// pair of bounds for continuing the search in the child nodes, in case
+ /// the node is internal.
+ ///
+ /// If not found, returns an `Err` with the leaf edge matching the entire
+ /// range.
+ ///
+ /// As a diagnostic service, panics if the range specifies impossible bounds.
+ ///
+ /// The result is meaningful only if the tree is ordered by key.
+ pub fn search_tree_for_bifurcation<'r, Q: ?Sized, R>(
+ mut self,
+ range: &'r R,
+ ) -> Result<
+ (
+ NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ usize,
+ usize,
+ SearchBound<&'r Q>,
+ SearchBound<&'r Q>,
+ ),
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+ >
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ R: RangeBounds<Q>,
+ {
+ // Determine if map or set is being searched
+ let is_set = <V as super::set_val::IsSetVal>::is_set_val();
+
+ // Inlining these variables should be avoided. We assume the bounds reported by `range`
+ // remain the same, but an adversarial implementation could change them between calls (#81138).
+ let (start, end) = (range.start_bound(), range.end_bound());
+ match (start, end) {
+ (Bound::Excluded(s), Bound::Excluded(e)) if s == e => {
+ if is_set {
+ panic!("range start and end are equal and excluded in BTreeSet")
+ } else {
+ panic!("range start and end are equal and excluded in BTreeMap")
+ }
+ }
+ (Bound::Included(s) | Bound::Excluded(s), Bound::Included(e) | Bound::Excluded(e))
+ if s > e =>
+ {
+ if is_set {
+ panic!("range start is greater than range end in BTreeSet")
+ } else {
+ panic!("range start is greater than range end in BTreeMap")
+ }
+ }
+ _ => {}
+ }
+ let mut lower_bound = SearchBound::from_range(start);
+ let mut upper_bound = SearchBound::from_range(end);
+ loop {
+ let (lower_edge_idx, lower_child_bound) = self.find_lower_bound_index(lower_bound);
+ let (upper_edge_idx, upper_child_bound) =
+ unsafe { self.find_upper_bound_index(upper_bound, lower_edge_idx) };
+ if lower_edge_idx < upper_edge_idx {
+ return Ok((
+ self,
+ lower_edge_idx,
+ upper_edge_idx,
+ lower_child_bound,
+ upper_child_bound,
+ ));
+ }
+ debug_assert_eq!(lower_edge_idx, upper_edge_idx);
+ let common_edge = unsafe { Handle::new_edge(self, lower_edge_idx) };
+ match common_edge.force() {
+ Leaf(common_edge) => return Err(common_edge),
+ Internal(common_edge) => {
+ self = common_edge.descend();
+ lower_bound = lower_child_bound;
+ upper_bound = upper_child_bound;
+ }
+ }
+ }
+ }
+
+ /// Finds an edge in the node delimiting the lower bound of a range.
+ /// Also returns the lower bound to be used for continuing the search in
+ /// the matching child node, if `self` is an internal node.
+ ///
+ /// The result is meaningful only if the tree is ordered by key.
+ pub fn find_lower_bound_edge<'r, Q>(
+ self,
+ bound: SearchBound<&'r Q>,
+ ) -> (Handle<Self, marker::Edge>, SearchBound<&'r Q>)
+ where
+ Q: ?Sized + Ord,
+ K: Borrow<Q>,
+ {
+ let (edge_idx, bound) = self.find_lower_bound_index(bound);
+ let edge = unsafe { Handle::new_edge(self, edge_idx) };
+ (edge, bound)
+ }
+
+ /// Counterpart of `find_lower_bound_edge` for the upper bound.
+ pub fn find_upper_bound_edge<'r, Q>(
+ self,
+ bound: SearchBound<&'r Q>,
+ ) -> (Handle<Self, marker::Edge>, SearchBound<&'r Q>)
+ where
+ Q: ?Sized + Ord,
+ K: Borrow<Q>,
+ {
+ let (edge_idx, bound) = unsafe { self.find_upper_bound_index(bound, 0) };
+ let edge = unsafe { Handle::new_edge(self, edge_idx) };
+ (edge, bound)
+ }
+}
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Looks up a given key in the node, without recursion.
+ /// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+ /// returns a `GoDown` with the handle of the edge where the key might be found
+ /// (if the node is internal) or where the key can be inserted.
+ ///
+ /// The result is meaningful only if the tree is ordered by key, like the tree
+ /// in a `BTreeMap` is.
+ pub fn search_node<Q: ?Sized>(self, key: &Q) -> SearchResult<BorrowType, K, V, Type, Type>
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ {
+ match unsafe { self.find_key_index(key, 0) } {
+ IndexResult::KV(idx) => Found(unsafe { Handle::new_kv(self, idx) }),
+ IndexResult::Edge(idx) => GoDown(unsafe { Handle::new_edge(self, idx) }),
+ }
+ }
+
+ /// Returns either the KV index in the node at which the key (or an equivalent)
+ /// exists, or the edge index where the key belongs, starting from a particular index.
+ ///
+ /// The result is meaningful only if the tree is ordered by key, like the tree
+ /// in a `BTreeMap` is.
+ ///
+ /// # Safety
+ /// `start_index` must be a valid edge index for the node.
+ unsafe fn find_key_index<Q: ?Sized>(&self, key: &Q, start_index: usize) -> IndexResult
+ where
+ Q: Ord,
+ K: Borrow<Q>,
+ {
+ let node = self.reborrow();
+ let keys = node.keys();
+ debug_assert!(start_index <= keys.len());
+ for (offset, k) in unsafe { keys.get_unchecked(start_index..) }.iter().enumerate() {
+ match key.cmp(k.borrow()) {
+ Ordering::Greater => {}
+ Ordering::Equal => return IndexResult::KV(start_index + offset),
+ Ordering::Less => return IndexResult::Edge(start_index + offset),
+ }
+ }
+ IndexResult::Edge(keys.len())
+ }
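+
+    // A minimal standalone sketch of the scan above over a plain sorted slice;
+    // `Pos` and `find_in_keys` are illustrative names, not part of this module:
+    //
+    // ```
+    // use core::cmp::Ordering;
+    //
+    // enum Pos { Kv(usize), Edge(usize) }
+    //
+    // fn find_in_keys(keys: &[i32], key: i32, start: usize) -> Pos {
+    //     for (offset, k) in keys[start..].iter().enumerate() {
+    //         match key.cmp(k) {
+    //             Ordering::Greater => {}                             // keep scanning
+    //             Ordering::Equal => return Pos::Kv(start + offset),  // exact match
+    //             Ordering::Less => return Pos::Edge(start + offset), // belongs before `k`
+    //         }
+    //     }
+    //     Pos::Edge(keys.len()) // belongs after every key in the node
+    // }
+    //
+    // assert!(matches!(find_in_keys(&[2, 4, 6], 4, 0), Pos::Kv(1)));
+    // assert!(matches!(find_in_keys(&[2, 4, 6], 5, 0), Pos::Edge(2)));
+    // ```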
+
+ /// Finds an edge index in the node delimiting the lower bound of a range.
+ /// Also returns the lower bound to be used for continuing the search in
+ /// the matching child node, if `self` is an internal node.
+ ///
+ /// The result is meaningful only if the tree is ordered by key.
+ fn find_lower_bound_index<'r, Q>(
+ &self,
+ bound: SearchBound<&'r Q>,
+ ) -> (usize, SearchBound<&'r Q>)
+ where
+ Q: ?Sized + Ord,
+ K: Borrow<Q>,
+ {
+ match bound {
+ Included(key) => match unsafe { self.find_key_index(key, 0) } {
+ IndexResult::KV(idx) => (idx, AllExcluded),
+ IndexResult::Edge(idx) => (idx, bound),
+ },
+ Excluded(key) => match unsafe { self.find_key_index(key, 0) } {
+ IndexResult::KV(idx) => (idx + 1, AllIncluded),
+ IndexResult::Edge(idx) => (idx, bound),
+ },
+ AllIncluded => (0, AllIncluded),
+ AllExcluded => (self.len(), AllExcluded),
+ }
+ }
+
+ /// Mirror image of `find_lower_bound_index` for the upper bound,
+ /// with an additional parameter to skip part of the key array.
+ ///
+ /// # Safety
+ /// `start_index` must be a valid edge index for the node.
+ unsafe fn find_upper_bound_index<'r, Q>(
+ &self,
+ bound: SearchBound<&'r Q>,
+ start_index: usize,
+ ) -> (usize, SearchBound<&'r Q>)
+ where
+ Q: ?Sized + Ord,
+ K: Borrow<Q>,
+ {
+ match bound {
+ Included(key) => match unsafe { self.find_key_index(key, start_index) } {
+ IndexResult::KV(idx) => (idx + 1, AllExcluded),
+ IndexResult::Edge(idx) => (idx, bound),
+ },
+ Excluded(key) => match unsafe { self.find_key_index(key, start_index) } {
+ IndexResult::KV(idx) => (idx, AllIncluded),
+ IndexResult::Edge(idx) => (idx, bound),
+ },
+ AllIncluded => (self.len(), AllIncluded),
+ AllExcluded => (start_index, AllExcluded),
+ }
+ }
+}
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
new file mode 100644
index 000000000..2cfc08074
--- /dev/null
+++ b/library/alloc/src/collections/btree/set.rs
@@ -0,0 +1,1789 @@
+// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
+// to TreeMap
+
+use crate::vec::Vec;
+use core::borrow::Borrow;
+use core::cmp::Ordering::{self, Equal, Greater, Less};
+use core::cmp::{max, min};
+use core::fmt::{self, Debug};
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::mem::ManuallyDrop;
+use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
+
+use super::map::{BTreeMap, Keys};
+use super::merge_iter::MergeIterInner;
+use super::set_val::SetValZST;
+use super::Recover;
+
+use crate::alloc::{Allocator, Global};
+
+// FIXME(conventions): implement bounded iterators
+
+/// An ordered set based on a B-Tree.
+///
+/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
+/// benefits and drawbacks.
+///
+/// It is a logic error for an item to be modified in such a way that the item's ordering relative
+/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+/// The behavior resulting from such a logic error is not specified, but will be encapsulated to the
+/// `BTreeSet` that observed the logic error and not result in undefined behavior. This could
+/// include panics, incorrect results, aborts, memory leaks, and non-termination.
+///
+/// Iterators returned by [`BTreeSet::iter`] produce their items in order, and take worst-case
+/// logarithmic and amortized constant time per item returned.
+///
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeSet;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BTreeSet<&str>` in this example).
+/// let mut books = BTreeSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons");
+/// books.insert("To Kill a Mockingbird");
+/// books.insert("The Odyssey");
+/// books.insert("The Great Gatsby");
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+/// println!("We have {} books, but The Winds of Winter ain't one.",
+/// books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+/// println!("{book}");
+/// }
+/// ```
+///
+/// A `BTreeSet` with a known list of items can be initialized from an array:
+///
+/// ```
+/// use std::collections::BTreeSet;
+///
+/// let set = BTreeSet::from([1, 2, 3]);
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")]
+pub struct BTreeSet<
+ T,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ map: BTreeMap<T, SetValZST, A>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Hash, A: Allocator + Clone> Hash for BTreeSet<T, A> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.map.hash(state)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialEq, A: Allocator + Clone> PartialEq for BTreeSet<T, A> {
+ fn eq(&self, other: &BTreeSet<T, A>) -> bool {
+ self.map.eq(&other.map)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Eq, A: Allocator + Clone> Eq for BTreeSet<T, A> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialOrd, A: Allocator + Clone> PartialOrd for BTreeSet<T, A> {
+ fn partial_cmp(&self, other: &BTreeSet<T, A>) -> Option<Ordering> {
+ self.map.partial_cmp(&other.map)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord, A: Allocator + Clone> Ord for BTreeSet<T, A> {
+ fn cmp(&self, other: &BTreeSet<T, A>) -> Ordering {
+ self.map.cmp(&other.map)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone, A: Allocator + Clone> Clone for BTreeSet<T, A> {
+ fn clone(&self) -> Self {
+ BTreeSet { map: self.map.clone() }
+ }
+
+ fn clone_from(&mut self, other: &Self) {
+ self.map.clone_from(&other.map);
+ }
+}
+
+/// An iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`iter`]: BTreeSet::iter
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+ iter: Keys<'a, T, SetValZST>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Iter").field(&self.iter.clone()).finish()
+ }
+}
+
+/// An owning iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`]
+/// (provided by the [`IntoIterator`] trait). See its documentation for more.
+///
+/// [`into_iter`]: BTreeSet#method.into_iter
+/// [`IntoIterator`]: core::iter::IntoIterator
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IntoIter<
+ T,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ iter: super::map::IntoIter<T, SetValZST, A>,
+}
+
+/// An iterator over a sub-range of items in a `BTreeSet`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`range`]: BTreeSet::range
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[derive(Debug)]
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, T: 'a> {
+ iter: super::map::Range<'a, T, SetValZST>,
+}
+
+/// A lazy iterator producing elements in the difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`difference`]: BTreeSet::difference
+#[must_use = "this returns the difference as an iterator, \
+ without modifying either input set"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Difference<
+ 'a,
+ T: 'a,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ inner: DifferenceInner<'a, T, A>,
+}
+enum DifferenceInner<'a, T: 'a, A: Allocator + Clone> {
+ Stitch {
+ // iterate all of `self` and some of `other`, spotting matches along the way
+ self_iter: Iter<'a, T>,
+ other_iter: Peekable<Iter<'a, T>>,
+ },
+ Search {
+ // iterate `self`, look up in `other`
+ self_iter: Iter<'a, T>,
+ other_set: &'a BTreeSet<T, A>,
+ },
+ Iterate(Iter<'a, T>), // simply produce all elements in `self`
+}
+
+// Explicit Debug impl necessary because of issue #26925
+impl<T: Debug, A: Allocator + Clone> Debug for DifferenceInner<'_, T, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ DifferenceInner::Stitch { self_iter, other_iter } => f
+ .debug_struct("Stitch")
+ .field("self_iter", self_iter)
+ .field("other_iter", other_iter)
+ .finish(),
+ DifferenceInner::Search { self_iter, other_set } => f
+ .debug_struct("Search")
+ .field("self_iter", self_iter)
+ .field("other_iter", other_set)
+ .finish(),
+ DifferenceInner::Iterate(x) => f.debug_tuple("Iterate").field(x).finish(),
+ }
+ }
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug, A: Allocator + Clone> fmt::Debug for Difference<'_, T, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Difference").field(&self.inner).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`BTreeSet`]. See its documentation for more.
+///
+/// [`symmetric_difference`]: BTreeSet::symmetric_difference
+#[must_use = "this returns the difference as an iterator, \
+ without modifying either input set"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("SymmetricDifference").field(&self.0).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`intersection`]: BTreeSet::intersection
+#[must_use = "this returns the intersection as an iterator, \
+ without modifying either input set"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Intersection<
+ 'a,
+ T: 'a,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> {
+ inner: IntersectionInner<'a, T, A>,
+}
+enum IntersectionInner<'a, T: 'a, A: Allocator + Clone> {
+ Stitch {
+ // iterate similarly sized sets jointly, spotting matches along the way
+ a: Iter<'a, T>,
+ b: Iter<'a, T>,
+ },
+ Search {
+ // iterate a small set, look up in the large set
+ small_iter: Iter<'a, T>,
+ large_set: &'a BTreeSet<T, A>,
+ },
+ Answer(Option<&'a T>), // return a specific element or emptiness
+}
+
+// Explicit Debug impl necessary because of issue #26925
+impl<T: Debug, A: Allocator + Clone> Debug for IntersectionInner<'_, T, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ IntersectionInner::Stitch { a, b } => {
+ f.debug_struct("Stitch").field("a", a).field("b", b).finish()
+ }
+ IntersectionInner::Search { small_iter, large_set } => f
+ .debug_struct("Search")
+ .field("small_iter", small_iter)
+ .field("large_set", large_set)
+ .finish(),
+ IntersectionInner::Answer(x) => f.debug_tuple("Answer").field(x).finish(),
+ }
+ }
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: Debug, A: Allocator + Clone> Debug for Intersection<'_, T, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Intersection").field(&self.inner).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the union of `BTreeSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`union`]: BTreeSet::union
+#[must_use = "this returns the union as an iterator, \
+ without modifying either input set"]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Union").field(&self.0).finish()
+ }
+}
+
+// This constant is used by functions that compare two sets.
+// It estimates the relative size at which searching performs better
+// than iterating, based on the benchmarks in
+// https://github.com/ssomers/rust_bench_btreeset_intersection.
+// It's used to divide rather than multiply sizes, to rule out overflow,
+// and it's a power of two to make that division cheap.
+const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16;
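+//
+// For example, with a tipping factor of 16, comparing a 10-element set against
+// a 1000-element set prefers per-element lookups, while two similarly sized
+// sets are stitched together:
+//
+// ```
+// let (small_len, large_len) = (10usize, 1000usize);
+// let prefer_search = small_len <= large_len / 16; // divide, don't multiply
+// assert!(prefer_search);
+// ```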
+
+impl<T> BTreeSet<T> {
+ /// Makes a new, empty `BTreeSet`.
+ ///
+ /// Does not allocate anything on its own.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<i32> = BTreeSet::new();
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ #[must_use]
+ pub const fn new() -> BTreeSet<T> {
+ BTreeSet { map: BTreeMap::new() }
+ }
+}
+
+impl<T, A: Allocator + Clone> BTreeSet<T, A> {
+ /// Makes a new `BTreeSet` with a reasonable choice of B.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// # #![feature(allocator_api)]
+ /// # #![feature(btreemap_alloc)]
+ /// use std::collections::BTreeSet;
+ /// use std::alloc::Global;
+ ///
+ /// let mut set: BTreeSet<i32> = BTreeSet::new_in(Global);
+ /// ```
+ #[unstable(feature = "btreemap_alloc", issue = "32838")]
+ pub fn new_in(alloc: A) -> BTreeSet<T, A> {
+ BTreeSet { map: BTreeMap::new_in(alloc) }
+ }
+
+ /// Constructs a double-ended iterator over a sub-range of elements in the set.
+    /// The simplest way is to use the range syntax `min..max`, so `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be specified as `(Bound<T>, Bound<T>)`; for example,
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+    /// Panics if the range's `start` is greater than its `end`, or if
+    /// `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ /// use std::ops::Bound::Included;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(3);
+ /// set.insert(5);
+ /// set.insert(8);
+ /// for &elem in set.range((Included(&4), Included(&8))) {
+ /// println!("{elem}");
+ /// }
+ /// assert_eq!(Some(&5), set.range(4..).next());
+ /// ```
+ #[stable(feature = "btree_range", since = "1.17.0")]
+ pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
+ where
+ K: Ord,
+ T: Borrow<K> + Ord,
+ R: RangeBounds<K>,
+ {
+ Range { iter: self.map.range(range) }
+ }
+
+ /// Visits the elements representing the difference,
+ /// i.e., the elements that are in `self` but not in `other`,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let diff: Vec<_> = a.difference(&b).cloned().collect();
+ /// assert_eq!(diff, [1]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn difference<'a>(&'a self, other: &'a BTreeSet<T, A>) -> Difference<'a, T, A>
+ where
+ T: Ord,
+ {
+ let (self_min, self_max) =
+ if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+ (self_min, self_max)
+ } else {
+ return Difference { inner: DifferenceInner::Iterate(self.iter()) };
+ };
+ let (other_min, other_max) =
+ if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+ (other_min, other_max)
+ } else {
+ return Difference { inner: DifferenceInner::Iterate(self.iter()) };
+ };
+ Difference {
+ inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
+ (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()),
+ (Equal, _) => {
+ let mut self_iter = self.iter();
+ self_iter.next();
+ DifferenceInner::Iterate(self_iter)
+ }
+ (_, Equal) => {
+ let mut self_iter = self.iter();
+ self_iter.next_back();
+ DifferenceInner::Iterate(self_iter)
+ }
+ _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+ DifferenceInner::Search { self_iter: self.iter(), other_set: other }
+ }
+ _ => DifferenceInner::Stitch {
+ self_iter: self.iter(),
+ other_iter: other.iter().peekable(),
+ },
+ },
+ }
+ }
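+
+    // The min/max pre-checks in `difference` mean that sets over disjoint
+    // ranges never compare element by element; the `Iterate` arm yields all
+    // of `self` directly:
+    //
+    // ```
+    // use std::collections::BTreeSet;
+    //
+    // let a = BTreeSet::from([1, 2, 3]);
+    // let b = BTreeSet::from([7, 8, 9]);
+    // assert!(a.difference(&b).eq(a.iter())); // a's max < b's min
+    // ```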
+
+ /// Visits the elements representing the symmetric difference,
+ /// i.e., the elements that are in `self` or in `other` but not in both,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
+ /// assert_eq!(sym_diff, [1, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn symmetric_difference<'a>(
+ &'a self,
+ other: &'a BTreeSet<T, A>,
+ ) -> SymmetricDifference<'a, T>
+ where
+ T: Ord,
+ {
+ SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
+ }
+
+ /// Visits the elements representing the intersection,
+ /// i.e., the elements that are both in `self` and `other`,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
+ /// assert_eq!(intersection, [2]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T, A>) -> Intersection<'a, T, A>
+ where
+ T: Ord,
+ {
+ let (self_min, self_max) =
+ if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+ (self_min, self_max)
+ } else {
+ return Intersection { inner: IntersectionInner::Answer(None) };
+ };
+ let (other_min, other_max) =
+ if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+ (other_min, other_max)
+ } else {
+ return Intersection { inner: IntersectionInner::Answer(None) };
+ };
+ Intersection {
+ inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
+ (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
+ (Equal, _) => IntersectionInner::Answer(Some(self_min)),
+ (_, Equal) => IntersectionInner::Answer(Some(self_max)),
+ _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+ IntersectionInner::Search { small_iter: self.iter(), large_set: other }
+ }
+ _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+ IntersectionInner::Search { small_iter: other.iter(), large_set: self }
+ }
+ _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() },
+ },
+ }
+ }
+
+ /// Visits the elements representing the union,
+ /// i.e., all the elements in `self` or `other`, without duplicates,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ ///
+ /// let union: Vec<_> = a.union(&b).cloned().collect();
+ /// assert_eq!(union, [1, 2]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn union<'a>(&'a self, other: &'a BTreeSet<T, A>) -> Union<'a, T>
+ where
+ T: Ord,
+ {
+ Union(MergeIterInner::new(self.iter(), other.iter()))
+ }
+
+ /// Clears the set, removing all elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// v.insert(1);
+ /// v.clear();
+ /// assert!(v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self)
+ where
+ A: Clone,
+ {
+ self.map.clear()
+ }
+
+ /// Returns `true` if the set contains an element equal to the value.
+ ///
+ /// The value may be any borrowed form of the set's element type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the element type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set = BTreeSet::from([1, 2, 3]);
+ /// assert_eq!(set.contains(&1), true);
+ /// assert_eq!(set.contains(&4), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+ where
+ T: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ self.map.contains_key(value)
+ }
+
+ /// Returns a reference to the element in the set, if any, that is equal to
+ /// the value.
+ ///
+ /// The value may be any borrowed form of the set's element type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the element type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set = BTreeSet::from([1, 2, 3]);
+ /// assert_eq!(set.get(&2), Some(&2));
+ /// assert_eq!(set.get(&4), None);
+ /// ```
+ #[stable(feature = "set_recovery", since = "1.9.0")]
+ pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+ where
+ T: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ Recover::get(&self.map, value)
+ }
+
+ /// Returns `true` if `self` has no elements in common with `other`.
+ /// This is equivalent to checking for an empty intersection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a = BTreeSet::from([1, 2, 3]);
+ /// let mut b = BTreeSet::new();
+ ///
+ /// assert_eq!(a.is_disjoint(&b), true);
+ /// b.insert(4);
+ /// assert_eq!(a.is_disjoint(&b), true);
+ /// b.insert(1);
+ /// assert_eq!(a.is_disjoint(&b), false);
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_disjoint(&self, other: &BTreeSet<T, A>) -> bool
+ where
+ T: Ord,
+ {
+ self.intersection(other).next().is_none()
+ }
+
+ /// Returns `true` if the set is a subset of another,
+ /// i.e., `other` contains at least all the elements in `self`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let sup = BTreeSet::from([1, 2, 3]);
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.is_subset(&sup), true);
+ /// set.insert(2);
+ /// assert_eq!(set.is_subset(&sup), true);
+ /// set.insert(4);
+ /// assert_eq!(set.is_subset(&sup), false);
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_subset(&self, other: &BTreeSet<T, A>) -> bool
+ where
+ T: Ord,
+ {
+        // Same result as `self.difference(other).next().is_none()`,
+        // but the code below is faster (hugely in some cases).
+ if self.len() > other.len() {
+ return false;
+ }
+ let (self_min, self_max) =
+ if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+ (self_min, self_max)
+ } else {
+ return true; // self is empty
+ };
+ let (other_min, other_max) =
+ if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+ (other_min, other_max)
+ } else {
+ return false; // other is empty
+ };
+ let mut self_iter = self.iter();
+ match self_min.cmp(other_min) {
+ Less => return false,
+ Equal => {
+ self_iter.next();
+ }
+ Greater => (),
+ }
+ match self_max.cmp(other_max) {
+ Greater => return false,
+ Equal => {
+ self_iter.next_back();
+ }
+ Less => (),
+ }
+ if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
+ for next in self_iter {
+ if !other.contains(next) {
+ return false;
+ }
+ }
+ } else {
+ let mut other_iter = other.iter();
+ other_iter.next();
+ other_iter.next_back();
+ let mut self_next = self_iter.next();
+ while let Some(self1) = self_next {
+ match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) {
+ Less => return false,
+ Equal => self_next = self_iter.next(),
+ Greater => (),
+ }
+ }
+ }
+ true
+ }
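+
+    // With the tipping factor of 16, a tiny set checked against a large one
+    // takes the lookup branch above, searching for each remaining element
+    // individually instead of walking `other`:
+    //
+    // ```
+    // use std::collections::BTreeSet;
+    //
+    // let small = BTreeSet::from([2, 4]);
+    // let large: BTreeSet<i32> = (1..=1000).collect();
+    // assert!(small.is_subset(&large)); // 2 <= 1000 / 16, so lookups win
+    // ```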
+
+ /// Returns `true` if the set is a superset of another,
+ /// i.e., `self` contains at least all the elements in `other`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let sub = BTreeSet::from([1, 2]);
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.is_superset(&sub), false);
+ ///
+ /// set.insert(0);
+ /// set.insert(1);
+ /// assert_eq!(set.is_superset(&sub), false);
+ ///
+ /// set.insert(2);
+ /// assert_eq!(set.is_superset(&sub), true);
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_superset(&self, other: &BTreeSet<T, A>) -> bool
+ where
+ T: Ord,
+ {
+ other.is_subset(self)
+ }
+
+ /// Returns a reference to the first element in the set, if any.
+ /// This element is always the minimum of all elements in the set.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// assert_eq!(set.first(), None);
+ /// set.insert(1);
+ /// assert_eq!(set.first(), Some(&1));
+ /// set.insert(2);
+ /// assert_eq!(set.first(), Some(&1));
+ /// ```
+ #[must_use]
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn first(&self) -> Option<&T>
+ where
+ T: Ord,
+ {
+ self.map.first_key_value().map(|(k, _)| k)
+ }
+
+ /// Returns a reference to the last element in the set, if any.
+ /// This element is always the maximum of all elements in the set.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// assert_eq!(set.last(), None);
+ /// set.insert(1);
+ /// assert_eq!(set.last(), Some(&1));
+ /// set.insert(2);
+ /// assert_eq!(set.last(), Some(&2));
+ /// ```
+ #[must_use]
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn last(&self) -> Option<&T>
+ where
+ T: Ord,
+ {
+ self.map.last_key_value().map(|(k, _)| k)
+ }
+
+ /// Removes the first element from the set and returns it, if any.
+ /// The first element is always the minimum element in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(1);
+ /// while let Some(n) = set.pop_first() {
+ /// assert_eq!(n, 1);
+ /// }
+ /// assert!(set.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_first(&mut self) -> Option<T>
+ where
+ T: Ord,
+ {
+ self.map.pop_first().map(|kv| kv.0)
+ }
+
+ /// Removes the last element from the set and returns it, if any.
+ /// The last element is always the maximum element in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(1);
+ /// while let Some(n) = set.pop_last() {
+ /// assert_eq!(n, 1);
+ /// }
+ /// assert!(set.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_last(&mut self) -> Option<T>
+ where
+ T: Ord,
+ {
+ self.map.pop_last().map(|kv| kv.0)
+ }
+
+ /// Adds a value to the set.
+ ///
+ /// Returns whether the value was newly inserted. That is:
+ ///
+ /// - If the set did not previously contain an equal value, `true` is
+ /// returned.
+ /// - If the set already contained an equal value, `false` is returned, and
+ /// the entry is not updated.
+ ///
+ /// See the [module-level documentation] for more.
+ ///
+ /// [module-level documentation]: index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.insert(2), true);
+ /// assert_eq!(set.insert(2), false);
+ /// assert_eq!(set.len(), 1);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, value: T) -> bool
+ where
+ T: Ord,
+ {
+ self.map.insert(value, SetValZST::default()).is_none()
+ }
+
+ /// Adds a value to the set, replacing the existing element, if any, that is
+ /// equal to the value. Returns the replaced element.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(Vec::<i32>::new());
+ ///
+ /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+ /// set.replace(Vec::with_capacity(10));
+ /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+ /// ```
+ #[stable(feature = "set_recovery", since = "1.9.0")]
+ pub fn replace(&mut self, value: T) -> Option<T>
+ where
+ T: Ord,
+ {
+ Recover::replace(&mut self.map, value)
+ }
+
+ /// If the set contains an element equal to the value, removes it from the
+ /// set and drops it. Returns whether such an element was present.
+ ///
+ /// The value may be any borrowed form of the set's element type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the element type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(2);
+ /// assert_eq!(set.remove(&2), true);
+ /// assert_eq!(set.remove(&2), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+ where
+ T: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ self.map.remove(value).is_some()
+ }
+
+ /// Removes and returns the element in the set, if any, that is equal to
+ /// the value.
+ ///
+ /// The value may be any borrowed form of the set's element type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the element type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::from([1, 2, 3]);
+ /// assert_eq!(set.take(&2), Some(2));
+ /// assert_eq!(set.take(&2), None);
+ /// ```
+ #[stable(feature = "set_recovery", since = "1.9.0")]
+ pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+ where
+ T: Borrow<Q> + Ord,
+ Q: Ord,
+ {
+ Recover::take(&mut self.map, value)
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all elements `e` for which `f(&e)` returns `false`.
+ /// The elements are visited in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
+ /// // Keep only the even numbers.
+ /// set.retain(|&k| k % 2 == 0);
+ /// assert!(set.iter().eq([2, 4, 6].iter()));
+ /// ```
+ #[stable(feature = "btree_retain", since = "1.53.0")]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ T: Ord,
+ F: FnMut(&T) -> bool,
+ {
+ self.drain_filter(|v| !f(v));
+ }
+
+ /// Moves all elements from `other` into `self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ /// a.insert(3);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(3);
+ /// b.insert(4);
+ /// b.insert(5);
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.len(), 5);
+ /// assert_eq!(b.len(), 0);
+ ///
+ /// assert!(a.contains(&1));
+ /// assert!(a.contains(&2));
+ /// assert!(a.contains(&3));
+ /// assert!(a.contains(&4));
+ /// assert!(a.contains(&5));
+ /// ```
+ #[stable(feature = "btree_append", since = "1.11.0")]
+ pub fn append(&mut self, other: &mut Self)
+ where
+ T: Ord,
+ A: Clone,
+ {
+ self.map.append(&mut other.map);
+ }
+
+ /// Splits the collection into two at the value. Returns a new collection
+ /// with all elements greater than or equal to the value.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ /// a.insert(3);
+ /// a.insert(17);
+ /// a.insert(41);
+ ///
+ /// let b = a.split_off(&3);
+ ///
+ /// assert_eq!(a.len(), 2);
+ /// assert_eq!(b.len(), 3);
+ ///
+ /// assert!(a.contains(&1));
+ /// assert!(a.contains(&2));
+ ///
+ /// assert!(b.contains(&3));
+ /// assert!(b.contains(&17));
+ /// assert!(b.contains(&41));
+ /// ```
+ #[stable(feature = "btree_split_off", since = "1.11.0")]
+ pub fn split_off<Q: ?Sized + Ord>(&mut self, value: &Q) -> Self
+ where
+ T: Borrow<Q> + Ord,
+ A: Clone,
+ {
+ BTreeSet { map: self.map.split_off(value) }
+ }
+
+ /// Creates an iterator that visits all elements in ascending order and
+ /// uses a closure to determine if an element should be removed.
+ ///
+ /// If the closure returns `true`, the element is removed from the set and
+ /// yielded. If the closure returns `false`, or panics, the element remains
+ /// in the set and will not be yielded.
+ ///
+ /// If the iterator is only partially consumed or not consumed at all, each
+ /// of the remaining elements is still subjected to the closure and removed
+ /// and dropped if it returns `true`.
+ ///
+ /// It is unspecified how many more elements will be subjected to the
+ /// closure if a panic occurs in the closure, or if a panic occurs while
+ /// dropping an element, or if the `DrainFilter` itself is leaked.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a set into even and odd values, reusing the original set:
+ ///
+ /// ```
+ /// #![feature(btree_drain_filter)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<i32> = (0..8).collect();
+ /// let evens: BTreeSet<_> = set.drain_filter(|v| v % 2 == 0).collect();
+ /// let odds = set;
+ /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
+ /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
+ /// ```
+ #[unstable(feature = "btree_drain_filter", issue = "70530")]
+ pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F, A>
+ where
+ T: Ord,
+ F: 'a + FnMut(&T) -> bool,
+ {
+ let (inner, alloc) = self.map.drain_filter_inner();
+ DrainFilter { pred, inner, alloc }
+ }
+
+ /// Gets an iterator that visits the elements in the `BTreeSet` in ascending
+ /// order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set = BTreeSet::from([1, 2, 3]);
+ /// let mut set_iter = set.iter();
+ /// assert_eq!(set_iter.next(), Some(&1));
+ /// assert_eq!(set_iter.next(), Some(&2));
+ /// assert_eq!(set_iter.next(), Some(&3));
+ /// assert_eq!(set_iter.next(), None);
+ /// ```
+ ///
+ /// Values returned by the iterator are returned in ascending order:
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set = BTreeSet::from([3, 1, 2]);
+ /// let mut set_iter = set.iter();
+ /// assert_eq!(set_iter.next(), Some(&1));
+ /// assert_eq!(set_iter.next(), Some(&2));
+ /// assert_eq!(set_iter.next(), Some(&3));
+ /// assert_eq!(set_iter.next(), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter { iter: self.map.keys() }
+ }
+
+ /// Returns the number of elements in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// assert_eq!(v.len(), 0);
+ /// v.insert(1);
+ /// assert_eq!(v.len(), 1);
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ pub const fn len(&self) -> usize {
+ self.map.len()
+ }
+
+ /// Returns `true` if the set contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// assert!(v.is_empty());
+ /// v.insert(1);
+ /// assert!(!v.is_empty());
+ /// ```
+ #[must_use]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ pub const fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BTreeSet<T> {
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
+ let mut inputs: Vec<_> = iter.into_iter().collect();
+
+ if inputs.is_empty() {
+ return BTreeSet::new();
+ }
+
+        // use a stable sort to preserve the insertion order of equal elements.
+ inputs.sort();
+ BTreeSet::from_sorted_iter(inputs.into_iter(), Global)
+ }
+}
+
+impl<T: Ord, A: Allocator + Clone> BTreeSet<T, A> {
+ fn from_sorted_iter<I: Iterator<Item = T>>(iter: I, alloc: A) -> BTreeSet<T, A> {
+ let iter = iter.map(|k| (k, SetValZST::default()));
+ let map = BTreeMap::bulk_build_from_sorted_iter(iter, alloc);
+ BTreeSet { map }
+ }
+}
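+
+// `FromIterator` therefore accepts input in any order and with duplicates:
+// it sorts first, then bulk-builds the tree from the sorted run, dropping
+// duplicates along the way.
+//
+// ```
+// use std::collections::BTreeSet;
+//
+// let set: BTreeSet<i32> = vec![3, 1, 2, 1].into_iter().collect();
+// assert!(set.iter().eq([1, 2, 3].iter()));
+// ```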
+
+#[stable(feature = "std_collections_from_array", since = "1.56.0")]
+impl<T: Ord, const N: usize> From<[T; N]> for BTreeSet<T> {
+ /// Converts a `[T; N]` into a `BTreeSet<T>`.
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set1 = BTreeSet::from([1, 2, 3, 4]);
+ /// let set2: BTreeSet<_> = [1, 2, 3, 4].into();
+ /// assert_eq!(set1, set2);
+ /// ```
+ fn from(mut arr: [T; N]) -> Self {
+ if N == 0 {
+ return BTreeSet::new();
+ }
+
+        // use a stable sort to preserve the insertion order of equal elements.
+ arr.sort();
+ let iter = IntoIterator::into_iter(arr).map(|k| (k, SetValZST::default()));
+ let map = BTreeMap::bulk_build_from_sorted_iter(iter, Global);
+ BTreeSet { map }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T, A: Allocator + Clone> IntoIterator for BTreeSet<T, A> {
+ type Item = T;
+ type IntoIter = IntoIter<T, A>;
+
+ /// Gets an iterator for moving out the `BTreeSet`'s contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set = BTreeSet::from([1, 2, 3, 4]);
+ ///
+ /// let v: Vec<_> = set.into_iter().collect();
+ /// assert_eq!(v, [1, 2, 3, 4]);
+ /// ```
+ fn into_iter(self) -> IntoIter<T, A> {
+ IntoIter { iter: self.map.into_iter() }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T, A: Allocator + Clone> IntoIterator for &'a BTreeSet<T, A> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+/// An iterator produced by calling `drain_filter` on a `BTreeSet`.
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+pub struct DrainFilter<
+ 'a,
+ T,
+ F,
+ #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
+> where
+ T: 'a,
+ F: 'a + FnMut(&T) -> bool,
+{
+ pred: F,
+ inner: super::map::DrainFilterInner<'a, T, SetValZST>,
+    /// The `BTreeMap` will outlive this `DrainFilter`, so we don't care about drop order for `alloc`.
+ alloc: A,
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<T, F, A: Allocator + Clone> Drop for DrainFilter<'_, T, F, A>
+where
+ F: FnMut(&T) -> bool,
+{
+ fn drop(&mut self) {
+ self.for_each(drop);
+ }
+}
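+
+// Because of this `Drop` impl, a partially consumed `DrainFilter` still runs
+// the predicate over (and removes) the remaining matching elements:
+//
+// ```
+// #![feature(btree_drain_filter)]
+// use std::collections::BTreeSet;
+//
+// let mut set: BTreeSet<i32> = (0..8).collect();
+// let mut evens = set.drain_filter(|v| v % 2 == 0);
+// evens.next(); // consume only the first match ...
+// drop(evens);  // ... dropping finishes the drain
+// assert!(set.iter().eq([1, 3, 5, 7].iter()));
+// ```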
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<T, F, A: Allocator + Clone> fmt::Debug for DrainFilter<'_, T, F, A>
+where
+ T: fmt::Debug,
+ F: FnMut(&T) -> bool,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish()
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<'a, T, F, A: Allocator + Clone> Iterator for DrainFilter<'_, T, F, A>
+where
+ F: 'a + FnMut(&T) -> bool,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ let pred = &mut self.pred;
+ let mut mapped_pred = |k: &T, _v: &mut SetValZST| pred(k);
+ self.inner.next(&mut mapped_pred, self.alloc.clone()).map(|(k, _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<T, F, A: Allocator + Clone> FusedIterator for DrainFilter<'_, T, F, A> where
+ F: FnMut(&T) -> bool
+{
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord, A: Allocator + Clone> Extend<T> for BTreeSet<T, A> {
+ #[inline]
+ fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
+ iter.into_iter().for_each(move |elem| {
+ self.insert(elem);
+ });
+ }
+
+ #[inline]
+ fn extend_one(&mut self, elem: T) {
+ self.insert(elem);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy, A: Allocator + Clone> Extend<&'a T> for BTreeSet<T, A> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &elem: &'a T) {
+ self.insert(elem);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for BTreeSet<T> {
+ /// Creates an empty `BTreeSet`.
+ fn default() -> BTreeSet<T> {
+ BTreeSet::new()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone, A: Allocator + Clone> Sub<&BTreeSet<T, A>> for &BTreeSet<T, A> {
+ type Output = BTreeSet<T, A>;
+
+ /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a = BTreeSet::from([1, 2, 3]);
+ /// let b = BTreeSet::from([3, 4, 5]);
+ ///
+ /// let result = &a - &b;
+ /// assert_eq!(result, BTreeSet::from([1, 2]));
+ /// ```
+ fn sub(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
+ BTreeSet::from_sorted_iter(
+ self.difference(rhs).cloned(),
+ ManuallyDrop::into_inner(self.map.alloc.clone()),
+ )
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone, A: Allocator + Clone> BitXor<&BTreeSet<T, A>> for &BTreeSet<T, A> {
+ type Output = BTreeSet<T, A>;
+
+ /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a = BTreeSet::from([1, 2, 3]);
+ /// let b = BTreeSet::from([2, 3, 4]);
+ ///
+ /// let result = &a ^ &b;
+ /// assert_eq!(result, BTreeSet::from([1, 4]));
+ /// ```
+ fn bitxor(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
+ BTreeSet::from_sorted_iter(
+ self.symmetric_difference(rhs).cloned(),
+ ManuallyDrop::into_inner(self.map.alloc.clone()),
+ )
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone, A: Allocator + Clone> BitAnd<&BTreeSet<T, A>> for &BTreeSet<T, A> {
+ type Output = BTreeSet<T, A>;
+
+ /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a = BTreeSet::from([1, 2, 3]);
+ /// let b = BTreeSet::from([2, 3, 4]);
+ ///
+ /// let result = &a & &b;
+ /// assert_eq!(result, BTreeSet::from([2, 3]));
+ /// ```
+ fn bitand(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
+ BTreeSet::from_sorted_iter(
+ self.intersection(rhs).cloned(),
+ ManuallyDrop::into_inner(self.map.alloc.clone()),
+ )
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone, A: Allocator + Clone> BitOr<&BTreeSet<T, A>> for &BTreeSet<T, A> {
+ type Output = BTreeSet<T, A>;
+
+ /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a = BTreeSet::from([1, 2, 3]);
+ /// let b = BTreeSet::from([3, 4, 5]);
+ ///
+ /// let result = &a | &b;
+ /// assert_eq!(result, BTreeSet::from([1, 2, 3, 4, 5]));
+ /// ```
+ fn bitor(self, rhs: &BTreeSet<T, A>) -> BTreeSet<T, A> {
+ BTreeSet::from_sorted_iter(
+ self.union(rhs).cloned(),
+ ManuallyDrop::into_inner(self.map.alloc.clone()),
+ )
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Debug, A: Allocator + Clone> Debug for BTreeSet<T, A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+ fn clone(&self) -> Self {
+ Iter { iter: self.iter.clone() }
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a T> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<&'a T> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<&'a T> {
+ self.next_back()
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ fn next_back(&mut self) -> Option<&'a T> {
+ self.iter.next_back()
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {
+ fn len(&self) -> usize {
+ self.iter.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T, A: Allocator + Clone> Iterator for IntoIter<T, A> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ self.iter.next().map(|(k, _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T, A: Allocator + Clone> DoubleEndedIterator for IntoIter<T, A> {
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back().map(|(k, _)| k)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T, A: Allocator + Clone> ExactSizeIterator for IntoIter<T, A> {
+ fn len(&self) -> usize {
+ self.iter.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T, A: Allocator + Clone> FusedIterator for IntoIter<T, A> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<T> Clone for Range<'_, T> {
+ fn clone(&self) -> Self {
+ Range { iter: self.iter.clone() }
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> Iterator for Range<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next().map(|(k, _)| k)
+ }
+
+ fn last(mut self) -> Option<&'a T> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<&'a T> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<&'a T> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> DoubleEndedIterator for Range<'a, T> {
+ fn next_back(&mut self) -> Option<&'a T> {
+ self.iter.next_back().map(|(k, _)| k)
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Range<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T, A: Allocator + Clone> Clone for Difference<'_, T, A> {
+ fn clone(&self) -> Self {
+ Difference {
+ inner: match &self.inner {
+ DifferenceInner::Stitch { self_iter, other_iter } => DifferenceInner::Stitch {
+ self_iter: self_iter.clone(),
+ other_iter: other_iter.clone(),
+ },
+ DifferenceInner::Search { self_iter, other_set } => {
+ DifferenceInner::Search { self_iter: self_iter.clone(), other_set }
+ }
+ DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()),
+ },
+ }
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord, A: Allocator + Clone> Iterator for Difference<'a, T, A> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ match &mut self.inner {
+ DifferenceInner::Stitch { self_iter, other_iter } => {
+ let mut self_next = self_iter.next()?;
+ loop {
+ match other_iter.peek().map_or(Less, |other_next| self_next.cmp(other_next)) {
+ Less => return Some(self_next),
+ Equal => {
+ self_next = self_iter.next()?;
+ other_iter.next();
+ }
+ Greater => {
+ other_iter.next();
+ }
+ }
+ }
+ }
+ DifferenceInner::Search { self_iter, other_set } => loop {
+ let self_next = self_iter.next()?;
+ if !other_set.contains(&self_next) {
+ return Some(self_next);
+ }
+ },
+ DifferenceInner::Iterate(iter) => iter.next(),
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (self_len, other_len) = match &self.inner {
+ DifferenceInner::Stitch { self_iter, other_iter } => {
+ (self_iter.len(), other_iter.len())
+ }
+ DifferenceInner::Search { self_iter, other_set } => (self_iter.len(), other_set.len()),
+ DifferenceInner::Iterate(iter) => (iter.len(), 0),
+ };
+ (self_len.saturating_sub(other_len), Some(self_len))
+ }
+
+ fn min(mut self) -> Option<&'a T> {
+ self.next()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord, A: Allocator + Clone> FusedIterator for Difference<'_, T, A> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for SymmetricDifference<'_, T> {
+ fn clone(&self) -> Self {
+ SymmetricDifference(self.0.clone())
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ loop {
+ let (a_next, b_next) = self.0.nexts(Self::Item::cmp);
+ if a_next.and(b_next).is_none() {
+ return a_next.or(b_next);
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (a_len, b_len) = self.0.lens();
+ // No checked_add, because even if a and b refer to the same set,
+ // and T is a zero-sized type, the storage overhead of sets limits
+ // the number of elements to less than half the range of usize.
+ (0, Some(a_len + b_len))
+ }
+
+ fn min(mut self) -> Option<&'a T> {
+ self.next()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T, A: Allocator + Clone> Clone for Intersection<'_, T, A> {
+ fn clone(&self) -> Self {
+ Intersection {
+ inner: match &self.inner {
+ IntersectionInner::Stitch { a, b } => {
+ IntersectionInner::Stitch { a: a.clone(), b: b.clone() }
+ }
+ IntersectionInner::Search { small_iter, large_set } => {
+ IntersectionInner::Search { small_iter: small_iter.clone(), large_set }
+ }
+ IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
+ },
+ }
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord, A: Allocator + Clone> Iterator for Intersection<'a, T, A> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ match &mut self.inner {
+ IntersectionInner::Stitch { a, b } => {
+ let mut a_next = a.next()?;
+ let mut b_next = b.next()?;
+ loop {
+ match a_next.cmp(b_next) {
+ Less => a_next = a.next()?,
+ Greater => b_next = b.next()?,
+ Equal => return Some(a_next),
+ }
+ }
+ }
+ IntersectionInner::Search { small_iter, large_set } => loop {
+ let small_next = small_iter.next()?;
+ if large_set.contains(&small_next) {
+ return Some(small_next);
+ }
+ },
+ IntersectionInner::Answer(answer) => answer.take(),
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ match &self.inner {
+ IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))),
+ IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())),
+ IntersectionInner::Answer(None) => (0, Some(0)),
+ IntersectionInner::Answer(Some(_)) => (1, Some(1)),
+ }
+ }
+
+ fn min(mut self) -> Option<&'a T> {
+ self.next()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord, A: Allocator + Clone> FusedIterator for Intersection<'_, T, A> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Union<'_, T> {
+ fn clone(&self) -> Self {
+ Union(self.0.clone())
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Union<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ let (a_next, b_next) = self.0.nexts(Self::Item::cmp);
+ a_next.or(b_next)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (a_len, b_len) = self.0.lens();
+ // No checked_add - see SymmetricDifference::size_hint.
+ (max(a_len, b_len), Some(a_len + b_len))
+ }
+
+ fn min(mut self) -> Option<&'a T> {
+ self.next()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord> FusedIterator for Union<'_, T> {}
+
+#[cfg(test)]
+mod tests;
diff --git a/library/alloc/src/collections/btree/set/tests.rs b/library/alloc/src/collections/btree/set/tests.rs
new file mode 100644
index 000000000..502d3e1d1
--- /dev/null
+++ b/library/alloc/src/collections/btree/set/tests.rs
@@ -0,0 +1,856 @@
+use super::super::testing::crash_test::{CrashTestDummy, Panic};
+use super::super::testing::rng::DeterministicRng;
+use super::*;
+use crate::vec::Vec;
+use std::cmp::Ordering;
+use std::hash::{Hash, Hasher};
+use std::iter::FromIterator;
+use std::ops::Bound::{Excluded, Included};
+use std::panic::{catch_unwind, AssertUnwindSafe};
+
+#[test]
+fn test_clone_eq() {
+ let mut m = BTreeSet::new();
+
+ m.insert(1);
+ m.insert(2);
+
+ assert_eq!(m.clone(), m);
+}
+
+#[test]
+fn test_iter_min_max() {
+ let mut a = BTreeSet::new();
+ assert_eq!(a.iter().min(), None);
+ assert_eq!(a.iter().max(), None);
+ assert_eq!(a.range(..).min(), None);
+ assert_eq!(a.range(..).max(), None);
+ assert_eq!(a.difference(&BTreeSet::new()).min(), None);
+ assert_eq!(a.difference(&BTreeSet::new()).max(), None);
+ assert_eq!(a.intersection(&a).min(), None);
+ assert_eq!(a.intersection(&a).max(), None);
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), None);
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), None);
+ assert_eq!(a.union(&a).min(), None);
+ assert_eq!(a.union(&a).max(), None);
+ a.insert(1);
+ a.insert(2);
+ assert_eq!(a.iter().min(), Some(&1));
+ assert_eq!(a.iter().max(), Some(&2));
+ assert_eq!(a.range(..).min(), Some(&1));
+ assert_eq!(a.range(..).max(), Some(&2));
+ assert_eq!(a.difference(&BTreeSet::new()).min(), Some(&1));
+ assert_eq!(a.difference(&BTreeSet::new()).max(), Some(&2));
+ assert_eq!(a.intersection(&a).min(), Some(&1));
+ assert_eq!(a.intersection(&a).max(), Some(&2));
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), Some(&1));
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), Some(&2));
+ assert_eq!(a.union(&a).min(), Some(&1));
+ assert_eq!(a.union(&a).max(), Some(&2));
+}
+
+fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
+where
+ F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool,
+{
+ let mut set_a = BTreeSet::new();
+ let mut set_b = BTreeSet::new();
+
+ for x in a {
+ assert!(set_a.insert(*x))
+ }
+ for y in b {
+ assert!(set_b.insert(*y))
+ }
+
+ let mut i = 0;
+ f(&set_a, &set_b, &mut |&x| {
+ if i < expected.len() {
+ assert_eq!(x, expected[i]);
+ }
+ i += 1;
+ true
+ });
+ assert_eq!(i, expected.len());
+}
+
+#[test]
+fn test_intersection() {
+ fn check_intersection(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.intersection(y).all(f))
+ }
+
+ check_intersection(&[], &[], &[]);
+ check_intersection(&[1, 2, 3], &[], &[]);
+ check_intersection(&[], &[1, 2, 3], &[]);
+ check_intersection(&[2], &[1, 2, 3], &[2]);
+ check_intersection(&[1, 2, 3], &[2], &[2]);
+ check_intersection(&[11, 1, 3, 77, 103, 5, -5], &[2, 11, 77, -9, -42, 5, 3], &[3, 5, 11, 77]);
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = Vec::from_iter(0..100);
+ check_intersection(&[], &large, &[]);
+ check_intersection(&large, &[], &[]);
+ check_intersection(&[-1], &large, &[]);
+ check_intersection(&large, &[-1], &[]);
+ check_intersection(&[0], &large, &[0]);
+ check_intersection(&large, &[0], &[0]);
+ check_intersection(&[99], &large, &[99]);
+ check_intersection(&large, &[99], &[99]);
+ check_intersection(&[100], &large, &[]);
+ check_intersection(&large, &[100], &[]);
+ check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, &[1, 3, 11, 77]);
+}
+
+#[test]
+fn test_intersection_size_hint() {
+ let x = BTreeSet::from([3, 4]);
+ let y = BTreeSet::from([1, 2, 3]);
+ let mut iter = x.intersection(&y);
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+ assert_eq!(iter.next(), Some(&3));
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+
+ iter = y.intersection(&y);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.size_hint(), (0, Some(2)));
+}
+
+#[test]
+fn test_difference() {
+ fn check_difference(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.difference(y).all(f))
+ }
+
+ check_difference(&[], &[], &[]);
+ check_difference(&[1, 12], &[], &[1, 12]);
+ check_difference(&[], &[1, 2, 3, 9], &[]);
+ check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]);
+ check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]);
+ check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]);
+ check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]);
+ check_difference(
+ &[-5, 11, 22, 33, 40, 42],
+ &[-12, -5, 14, 23, 34, 38, 39, 50],
+ &[11, 22, 33, 40, 42],
+ );
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = Vec::from_iter(0..100);
+ check_difference(&[], &large, &[]);
+ check_difference(&[-1], &large, &[-1]);
+ check_difference(&[0], &large, &[]);
+ check_difference(&[99], &large, &[]);
+ check_difference(&[100], &large, &[100]);
+ check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]);
+ check_difference(&large, &[], &large);
+ check_difference(&large, &[-1], &large);
+ check_difference(&large, &[100], &large);
+}
+
+#[test]
+fn test_difference_size_hint() {
+ let s246 = BTreeSet::from([2, 4, 6]);
+ let s23456 = BTreeSet::from_iter(2..=6);
+ let mut iter = s246.difference(&s23456);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), None);
+
+ let s12345 = BTreeSet::from_iter(1..=5);
+ iter = s246.difference(&s12345);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), Some(&6));
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+
+ let s34567 = BTreeSet::from_iter(3..=7);
+ iter = s246.difference(&s34567);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (0, Some(2)));
+ assert_eq!(iter.next(), None);
+
+ let s1 = BTreeSet::from_iter(-9..=1);
+ iter = s246.difference(&s1);
+ assert_eq!(iter.size_hint(), (3, Some(3)));
+
+ let s2 = BTreeSet::from_iter(-9..=2);
+ iter = s246.difference(&s2);
+ assert_eq!(iter.size_hint(), (2, Some(2)));
+ assert_eq!(iter.next(), Some(&4));
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+
+ let s23 = BTreeSet::from([2, 3]);
+ iter = s246.difference(&s23);
+ assert_eq!(iter.size_hint(), (1, Some(3)));
+ assert_eq!(iter.next(), Some(&4));
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+
+ let s4 = BTreeSet::from([4]);
+ iter = s246.difference(&s4);
+ assert_eq!(iter.size_hint(), (2, Some(3)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (1, Some(2)));
+ assert_eq!(iter.next(), Some(&6));
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+
+ let s56 = BTreeSet::from([5, 6]);
+ iter = s246.difference(&s56);
+ assert_eq!(iter.size_hint(), (1, Some(3)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (0, Some(2)));
+
+ let s6 = BTreeSet::from_iter(6..=19);
+ iter = s246.difference(&s6);
+ assert_eq!(iter.size_hint(), (2, Some(2)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+
+ let s7 = BTreeSet::from_iter(7..=19);
+ iter = s246.difference(&s7);
+ assert_eq!(iter.size_hint(), (3, Some(3)));
+}
+
+#[test]
+fn test_symmetric_difference() {
+ fn check_symmetric_difference(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.symmetric_difference(y).all(f))
+ }
+
+ check_symmetric_difference(&[], &[], &[]);
+ check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]);
+ check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]);
+ check_symmetric_difference(&[1, 3, 5, 9, 11], &[-2, 3, 9, 14, 22], &[-2, 1, 5, 11, 14, 22]);
+}
+
+#[test]
+fn test_symmetric_difference_size_hint() {
+ let x = BTreeSet::from([2, 4]);
+ let y = BTreeSet::from([1, 2, 3]);
+ let mut iter = x.symmetric_difference(&y);
+ assert_eq!(iter.size_hint(), (0, Some(5)));
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.size_hint(), (0, Some(4)));
+ assert_eq!(iter.next(), Some(&3));
+ assert_eq!(iter.size_hint(), (0, Some(1)));
+}
+
+#[test]
+fn test_union() {
+ fn check_union(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.union(y).all(f))
+ }
+
+ check_union(&[], &[], &[]);
+ check_union(&[1, 2, 3], &[2], &[1, 2, 3]);
+ check_union(&[2], &[1, 2, 3], &[1, 2, 3]);
+ check_union(
+ &[1, 3, 5, 9, 11, 16, 19, 24],
+ &[-2, 1, 5, 9, 13, 19],
+ &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24],
+ );
+}
+
+#[test]
+fn test_union_size_hint() {
+ let x = BTreeSet::from([2, 4]);
+ let y = BTreeSet::from([1, 2, 3]);
+ let mut iter = x.union(&y);
+ assert_eq!(iter.size_hint(), (3, Some(5)));
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.size_hint(), (2, Some(4)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (1, Some(2)));
+}
+
+#[test]
+// Only tests the simple definition of `is_disjoint` in terms of intersection.
+fn test_is_disjoint() {
+ let one = BTreeSet::from([1]);
+ let two = BTreeSet::from([2]);
+ assert!(one.is_disjoint(&two));
+}
+
+#[test]
+// Also implicitly tests the trivial definition of `is_superset` in terms of `is_subset`.
+fn test_is_subset() {
+ fn is_subset(a: &[i32], b: &[i32]) -> bool {
+ let set_a = BTreeSet::from_iter(a.iter());
+ let set_b = BTreeSet::from_iter(b.iter());
+ set_a.is_subset(&set_b)
+ }
+
+ assert_eq!(is_subset(&[], &[]), true);
+ assert_eq!(is_subset(&[], &[1, 2]), true);
+ assert_eq!(is_subset(&[0], &[1, 2]), false);
+ assert_eq!(is_subset(&[1], &[1, 2]), true);
+ assert_eq!(is_subset(&[2], &[1, 2]), true);
+ assert_eq!(is_subset(&[3], &[1, 2]), false);
+ assert_eq!(is_subset(&[1, 2], &[1]), false);
+ assert_eq!(is_subset(&[1, 2], &[1, 2]), true);
+ assert_eq!(is_subset(&[1, 2], &[2, 3]), false);
+ assert_eq!(
+ is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]),
+ true
+ );
+ assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 34, 38]), false);
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = Vec::from_iter(0..100);
+ assert_eq!(is_subset(&[], &large), true);
+ assert_eq!(is_subset(&large, &[]), false);
+ assert_eq!(is_subset(&[-1], &large), false);
+ assert_eq!(is_subset(&[0], &large), true);
+ assert_eq!(is_subset(&[1, 2], &large), true);
+ assert_eq!(is_subset(&[99, 100], &large), false);
+}
+
+#[test]
+fn test_is_superset() {
+ fn is_superset(a: &[i32], b: &[i32]) -> bool {
+ let set_a = BTreeSet::from_iter(a.iter());
+ let set_b = BTreeSet::from_iter(b.iter());
+ set_a.is_superset(&set_b)
+ }
+
+ assert_eq!(is_superset(&[], &[]), true);
+ assert_eq!(is_superset(&[], &[1, 2]), false);
+ assert_eq!(is_superset(&[0], &[1, 2]), false);
+ assert_eq!(is_superset(&[1], &[1, 2]), false);
+ assert_eq!(is_superset(&[4], &[1, 2]), false);
+ assert_eq!(is_superset(&[1, 4], &[1, 2]), false);
+ assert_eq!(is_superset(&[1, 2], &[1, 2]), true);
+ assert_eq!(is_superset(&[1, 2, 3], &[1, 3]), true);
+ assert_eq!(is_superset(&[1, 2, 3], &[]), true);
+ assert_eq!(is_superset(&[-1, 1, 2, 3], &[-1, 3]), true);
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = Vec::from_iter(0..100);
+ assert_eq!(is_superset(&[], &large), false);
+ assert_eq!(is_superset(&large, &[]), true);
+ assert_eq!(is_superset(&large, &[1]), true);
+ assert_eq!(is_superset(&large, &[50, 99]), true);
+ assert_eq!(is_superset(&large, &[100]), false);
+ assert_eq!(is_superset(&large, &[0, 99]), true);
+ assert_eq!(is_superset(&[-1], &large), false);
+ assert_eq!(is_superset(&[0], &large), false);
+ assert_eq!(is_superset(&[99, 100], &large), false);
+}
+
+#[test]
+fn test_retain() {
+ let mut set = BTreeSet::from([1, 2, 3, 4, 5, 6]);
+ set.retain(|&k| k % 2 == 0);
+ assert_eq!(set.len(), 3);
+ assert!(set.contains(&2));
+ assert!(set.contains(&4));
+ assert!(set.contains(&6));
+}
+
+#[test]
+fn test_drain_filter() {
+ let mut x = BTreeSet::from([1]);
+ let mut y = BTreeSet::from([1]);
+
+ x.drain_filter(|_| true);
+ y.drain_filter(|_| false);
+ assert_eq!(x.len(), 0);
+ assert_eq!(y.len(), 1);
+}
+
+#[test]
+fn test_drain_filter_drop_panic_leak() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let mut set = BTreeSet::new();
+ set.insert(a.spawn(Panic::Never));
+ set.insert(b.spawn(Panic::InDrop));
+ set.insert(c.spawn(Panic::Never));
+
+ catch_unwind(move || drop(set.drain_filter(|dummy| dummy.query(true)))).ok();
+
+ assert_eq!(a.queried(), 1);
+ assert_eq!(b.queried(), 1);
+ assert_eq!(c.queried(), 0);
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 1);
+ assert_eq!(c.dropped(), 1);
+}
+
+#[test]
+fn test_drain_filter_pred_panic_leak() {
+ let a = CrashTestDummy::new(0);
+ let b = CrashTestDummy::new(1);
+ let c = CrashTestDummy::new(2);
+ let mut set = BTreeSet::new();
+ set.insert(a.spawn(Panic::Never));
+ set.insert(b.spawn(Panic::InQuery));
+ set.insert(c.spawn(Panic::InQuery));
+
+ catch_unwind(AssertUnwindSafe(|| drop(set.drain_filter(|dummy| dummy.query(true))))).ok();
+
+ assert_eq!(a.queried(), 1);
+ assert_eq!(b.queried(), 1);
+ assert_eq!(c.queried(), 0);
+ assert_eq!(a.dropped(), 1);
+ assert_eq!(b.dropped(), 0);
+ assert_eq!(c.dropped(), 0);
+ assert_eq!(set.len(), 2);
+ assert_eq!(set.first().unwrap().id(), 1);
+ assert_eq!(set.last().unwrap().id(), 2);
+}
+
+#[test]
+fn test_clear() {
+ let mut x = BTreeSet::new();
+ x.insert(1);
+
+ x.clear();
+ assert!(x.is_empty());
+}
+
+#[test]
+fn test_remove() {
+ let mut x = BTreeSet::new();
+ assert!(x.is_empty());
+
+ x.insert(1);
+ x.insert(2);
+ x.insert(3);
+ x.insert(4);
+
+ assert_eq!(x.remove(&2), true);
+ assert_eq!(x.remove(&0), false);
+ assert_eq!(x.remove(&5), false);
+ assert_eq!(x.remove(&1), true);
+ assert_eq!(x.remove(&2), false);
+ assert_eq!(x.remove(&3), true);
+ assert_eq!(x.remove(&4), true);
+ assert_eq!(x.remove(&4), false);
+ assert!(x.is_empty());
+}
+
+#[test]
+fn test_zip() {
+ let mut x = BTreeSet::new();
+ x.insert(5);
+ x.insert(12);
+ x.insert(11);
+
+ let mut y = BTreeSet::new();
+ y.insert("foo");
+ y.insert("bar");
+
+ let x = x;
+ let y = y;
+ let mut z = x.iter().zip(&y);
+
+ assert_eq!(z.next().unwrap(), (&5, &("bar")));
+ assert_eq!(z.next().unwrap(), (&11, &("foo")));
+ assert!(z.next().is_none());
+}
+
+#[test]
+fn test_from_iter() {
+ let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+
+ let set = BTreeSet::from_iter(xs.iter());
+
+ for x in &xs {
+ assert!(set.contains(x));
+ }
+}
+
+#[test]
+fn test_show() {
+ let mut set = BTreeSet::new();
+ let empty = BTreeSet::<i32>::new();
+
+ set.insert(1);
+ set.insert(2);
+
+ let set_str = format!("{set:?}");
+
+ assert_eq!(set_str, "{1, 2}");
+ assert_eq!(format!("{empty:?}"), "{}");
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut a = BTreeSet::new();
+ a.insert(1);
+
+ a.extend(&[2, 3, 4]);
+
+ assert_eq!(a.len(), 4);
+ assert!(a.contains(&1));
+ assert!(a.contains(&2));
+ assert!(a.contains(&3));
+ assert!(a.contains(&4));
+
+ let mut b = BTreeSet::new();
+ b.insert(5);
+ b.insert(6);
+
+ a.extend(&b);
+
+ assert_eq!(a.len(), 6);
+ assert!(a.contains(&1));
+ assert!(a.contains(&2));
+ assert!(a.contains(&3));
+ assert!(a.contains(&4));
+ assert!(a.contains(&5));
+ assert!(a.contains(&6));
+}
+
+#[test]
+fn test_recovery() {
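+ // `Foo` compares only by its first field, so `replace`, `get` and `take`
+ // reveal which of two equal-ranked values the set actually stores.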
+ #[derive(Debug)]
+ struct Foo(&'static str, i32);
+
+ impl PartialEq for Foo {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+ }
+
+ impl Eq for Foo {}
+
+ impl PartialOrd for Foo {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.0.partial_cmp(&other.0)
+ }
+ }
+
+ impl Ord for Foo {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+ }
+
+ let mut s = BTreeSet::new();
+ assert_eq!(s.replace(Foo("a", 1)), None);
+ assert_eq!(s.len(), 1);
+ assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
+ assert_eq!(s.len(), 1);
+
+ {
+ let mut it = s.iter();
+ assert_eq!(it.next(), Some(&Foo("a", 2)));
+ assert_eq!(it.next(), None);
+ }
+
+ assert_eq!(s.get(&Foo("a", 1)), Some(&Foo("a", 2)));
+ assert_eq!(s.take(&Foo("a", 1)), Some(Foo("a", 2)));
+ assert_eq!(s.len(), 0);
+
+ assert_eq!(s.get(&Foo("a", 1)), None);
+ assert_eq!(s.take(&Foo("a", 1)), None);
+
+ assert_eq!(s.iter().next(), None);
+}
+
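+// Compile-time checks only: these functions merely need to type-check in
+// order to prove that the named iterator types are covariant.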
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> {
+ v
+ }
+ fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
+ v
+ }
+ fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {
+ v
+ }
+ fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> {
+ v
+ }
+ // not applied to Difference, Intersection, SymmetricDifference, Union
+}
+
+#[allow(dead_code)]
+fn assert_sync() {
+ fn set<T: Sync>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v
+ }
+
+ fn iter<T: Sync>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v.iter()
+ }
+
+ fn into_iter<T: Sync>(v: BTreeSet<T>) -> impl Sync {
+ v.into_iter()
+ }
+
+ fn range<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v.range(..)
+ }
+
+ fn drain_filter<T: Sync + Ord>(v: &mut BTreeSet<T>) -> impl Sync + '_ {
+ v.drain_filter(|_| false)
+ }
+
+ fn difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v.difference(&v)
+ }
+
+ fn intersection<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v.intersection(&v)
+ }
+
+ fn symmetric_difference<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v.symmetric_difference(&v)
+ }
+
+ fn union<T: Sync + Ord>(v: &BTreeSet<T>) -> impl Sync + '_ {
+ v.union(&v)
+ }
+}
+
+#[allow(dead_code)]
+fn assert_send() {
+ fn set<T: Send>(v: BTreeSet<T>) -> impl Send {
+ v
+ }
+
+ fn iter<T: Send + Sync>(v: &BTreeSet<T>) -> impl Send + '_ {
+ v.iter()
+ }
+
+ fn into_iter<T: Send>(v: BTreeSet<T>) -> impl Send {
+ v.into_iter()
+ }
+
+ fn range<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
+ v.range(..)
+ }
+
+ fn drain_filter<T: Send + Ord>(v: &mut BTreeSet<T>) -> impl Send + '_ {
+ v.drain_filter(|_| false)
+ }
+
+ fn difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
+ v.difference(&v)
+ }
+
+ fn intersection<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
+ v.intersection(&v)
+ }
+
+ fn symmetric_difference<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
+ v.symmetric_difference(&v)
+ }
+
+ fn union<T: Send + Sync + Ord>(v: &BTreeSet<T>) -> impl Send + '_ {
+ v.union(&v)
+ }
+}
+
+#[allow(dead_code)]
+// Check that the member-like functions conditionally provided by #[derive()]
+// are not overridden by genuine member functions with a different signature.
+fn assert_derives() {
+ fn hash<T: Hash, H: Hasher>(v: BTreeSet<T>, state: &mut H) {
+ v.hash(state);
+ // Tested much more thoroughly outside the crate in btree_set_hash.rs
+ }
+ fn eq<T: PartialEq>(v: BTreeSet<T>) {
+ let _ = v.eq(&v);
+ }
+ fn ne<T: PartialEq>(v: BTreeSet<T>) {
+ let _ = v.ne(&v);
+ }
+ fn cmp<T: Ord>(v: BTreeSet<T>) {
+ let _ = v.cmp(&v);
+ }
+ fn min<T: Ord>(v: BTreeSet<T>, w: BTreeSet<T>) {
+ let _ = v.min(w);
+ }
+ fn max<T: Ord>(v: BTreeSet<T>, w: BTreeSet<T>) {
+ let _ = v.max(w);
+ }
+ fn clamp<T: Ord>(v: BTreeSet<T>, w: BTreeSet<T>, x: BTreeSet<T>) {
+ let _ = v.clamp(w, x);
+ }
+ fn partial_cmp<T: PartialOrd>(v: &BTreeSet<T>) {
+ let _ = v.partial_cmp(&v);
+ }
+}
+
+#[test]
+fn test_ord_absence() {
+ fn set<K>(mut set: BTreeSet<K>) {
+ let _ = set.is_empty();
+ let _ = set.len();
+ set.clear();
+ let _ = set.iter();
+ let _ = set.into_iter();
+ }
+
+ fn set_debug<K: Debug>(set: BTreeSet<K>) {
+ format!("{set:?}");
+ format!("{:?}", set.iter());
+ format!("{:?}", set.into_iter());
+ }
+
+ fn set_clone<K: Clone>(mut set: BTreeSet<K>) {
+ set.clone_from(&set.clone());
+ }
+
+ #[derive(Debug, Clone)]
+ struct NonOrd;
+ set(BTreeSet::<NonOrd>::new());
+ set_debug(BTreeSet::<NonOrd>::new());
+ set_clone(BTreeSet::<NonOrd>::default());
+}
+
+#[test]
+fn test_append() {
+ let mut a = BTreeSet::new();
+ a.insert(1);
+ a.insert(2);
+ a.insert(3);
+
+ let mut b = BTreeSet::new();
+ b.insert(3);
+ b.insert(4);
+ b.insert(5);
+
+ a.append(&mut b);
+
+ assert_eq!(a.len(), 5);
+ assert_eq!(b.len(), 0);
+
+ assert_eq!(a.contains(&1), true);
+ assert_eq!(a.contains(&2), true);
+ assert_eq!(a.contains(&3), true);
+ assert_eq!(a.contains(&4), true);
+ assert_eq!(a.contains(&5), true);
+}
+
+#[test]
+fn test_first_last() {
+ let mut a = BTreeSet::new();
+ assert_eq!(a.first(), None);
+ assert_eq!(a.last(), None);
+ a.insert(1);
+ assert_eq!(a.first(), Some(&1));
+ assert_eq!(a.last(), Some(&1));
+ a.insert(2);
+ assert_eq!(a.first(), Some(&1));
+ assert_eq!(a.last(), Some(&2));
+ for i in 3..=12 {
+ a.insert(i);
+ }
+ assert_eq!(a.first(), Some(&1));
+ assert_eq!(a.last(), Some(&12));
+ assert_eq!(a.pop_first(), Some(1));
+ assert_eq!(a.pop_last(), Some(12));
+ assert_eq!(a.pop_first(), Some(2));
+ assert_eq!(a.pop_last(), Some(11));
+ assert_eq!(a.pop_first(), Some(3));
+ assert_eq!(a.pop_last(), Some(10));
+ assert_eq!(a.pop_first(), Some(4));
+ assert_eq!(a.pop_first(), Some(5));
+ assert_eq!(a.pop_first(), Some(6));
+ assert_eq!(a.pop_first(), Some(7));
+ assert_eq!(a.pop_first(), Some(8));
+ assert_eq!(a.clone().pop_last(), Some(9));
+ assert_eq!(a.pop_first(), Some(9));
+ assert_eq!(a.pop_first(), None);
+ assert_eq!(a.pop_last(), None);
+}
+
+// Unlike the function with the same name in map/tests, this returns no values,
+// so it consumes the pseudo-random sequence differently and yields different
+// predetermined keys; the test cases using it therefore explore slightly
+// different trees.
+fn rand_data(len: usize) -> Vec<u32> {
+ let mut rng = DeterministicRng::new();
+ Vec::from_iter((0..len).map(|_| rng.next()))
+}
+
+#[test]
+fn test_split_off_empty_right() {
+ let mut data = rand_data(173);
+
+ let mut set = BTreeSet::from_iter(data.clone());
+ let right = set.split_off(&(data.iter().max().unwrap() + 1));
+
+ data.sort();
+ assert!(set.into_iter().eq(data));
+ assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+ let mut data = rand_data(314);
+
+ let mut set = BTreeSet::from_iter(data.clone());
+ let right = set.split_off(data.iter().min().unwrap());
+
+ data.sort();
+ assert!(set.into_iter().eq(None));
+ assert!(right.into_iter().eq(data));
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+ // Miri is too slow
+ let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) };
+ // special case with maximum height.
+ data.sort();
+
+ let mut set = BTreeSet::from_iter(data.clone());
+ let key = data[data.len() / 2];
+ let right = set.split_off(&key);
+
+ assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < key)));
+ assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key)));
+}
+
+#[test]
+fn from_array() {
+ let set = BTreeSet::from([1, 2, 3, 4]);
+ let unordered_duplicates = BTreeSet::from([4, 1, 4, 3, 2]);
+ assert_eq!(set, unordered_duplicates);
+}
+
+#[should_panic(expected = "range start is greater than range end in BTreeSet")]
+#[test]
+fn test_range_panic_1() {
+ let mut set = BTreeSet::new();
+ set.insert(3);
+ set.insert(5);
+ set.insert(8);
+
+ let _invalid_range = set.range((Included(&8), Included(&3)));
+}
+
+#[should_panic(expected = "range start and end are equal and excluded in BTreeSet")]
+#[test]
+fn test_range_panic_2() {
+ let mut set = BTreeSet::new();
+ set.insert(3);
+ set.insert(5);
+ set.insert(8);
+
+ let _invalid_range = set.range((Excluded(&5), Excluded(&5)));
+}
diff --git a/library/alloc/src/collections/btree/set_val.rs b/library/alloc/src/collections/btree/set_val.rs
new file mode 100644
index 000000000..80c459bcf
--- /dev/null
+++ b/library/alloc/src/collections/btree/set_val.rs
@@ -0,0 +1,29 @@
+/// Zero-Sized Type (ZST) for internal `BTreeSet` values.
+/// Used instead of `()` to differentiate between:
+/// * `BTreeMap<T, ()>` (possible user-defined map)
+/// * `BTreeMap<T, SetValZST>` (internal set representation)
+#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Default)]
+pub struct SetValZST;
+
+/// A trait to differentiate between `BTreeMap` and `BTreeSet` values.
+/// Returns `true` only for type `SetValZST`, `false` for all other types
+/// (via the blanket implementation below).
+/// [`TypeId`] requires a `'static` lifetime; using this trait avoids that
+/// restriction.
+///
+/// [`TypeId`]: std::any::TypeId
+pub trait IsSetVal {
+ fn is_set_val() -> bool;
+}
+
+// Blanket implementation
+impl<V> IsSetVal for V {
+ default fn is_set_val() -> bool {
+ false
+ }
+}
+
+// Specialization
+impl IsSetVal for SetValZST {
+ fn is_set_val() -> bool {
+ true
+ }
+}
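+
+// Illustrative sketch (not part of the upstream file): a generic routine can
+// branch on the value type via `IsSetVal`; the name `describe_value_kind`
+// is hypothetical.
+#[allow(dead_code)]
+fn describe_value_kind<V: IsSetVal>() -> &'static str {
+ if V::is_set_val() { "set value" } else { "map value" }
+}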
diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs
new file mode 100644
index 000000000..638dc98fc
--- /dev/null
+++ b/library/alloc/src/collections/btree/split.rs
@@ -0,0 +1,73 @@
+use super::node::{ForceResult::*, Root};
+use super::search::SearchResult::*;
+use core::alloc::Allocator;
+use core::borrow::Borrow;
+
+impl<K, V> Root<K, V> {
+ /// Calculates the length of both trees that result from splitting up
+ /// a given number of distinct key-value pairs.
+ pub fn calc_split_length(
+ total_num: usize,
+ root_a: &Root<K, V>,
+ root_b: &Root<K, V>,
+ ) -> (usize, usize) {
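+ // Fully count only the tree of lesser height (likely the smaller one)
+ // and derive the other tree's length by subtraction.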
+ let (length_a, length_b);
+ if root_a.height() < root_b.height() {
+ length_a = root_a.reborrow().calc_length();
+ length_b = total_num - length_a;
+ debug_assert_eq!(length_b, root_b.reborrow().calc_length());
+ } else {
+ length_b = root_b.reborrow().calc_length();
+ length_a = total_num - length_b;
+ debug_assert_eq!(length_a, root_a.reborrow().calc_length());
+ }
+ (length_a, length_b)
+ }
+
+ /// Split off a tree with key-value pairs at and after the given key.
+ /// The result is meaningful only if the tree is ordered by key,
+ /// and if the ordering of `Q` corresponds to that of `K`.
+ /// If `self` respects all `BTreeMap` tree invariants, then both
+ /// `self` and the returned tree will respect those invariants.
+ pub fn split_off<Q: ?Sized + Ord, A: Allocator + Clone>(&mut self, key: &Q, alloc: A) -> Self
+ where
+ K: Borrow<Q>,
+ {
+ let left_root = self;
+ let mut right_root = Root::new_pillar(left_root.height(), alloc.clone());
+ let mut left_node = left_root.borrow_mut();
+ let mut right_node = right_root.borrow_mut();
+
+ loop {
+ let mut split_edge = match left_node.search_node(key) {
+ // key is going to the right tree
+ Found(kv) => kv.left_edge(),
+ GoDown(edge) => edge,
+ };
+
+ split_edge.move_suffix(&mut right_node);
+
+ match (split_edge.force(), right_node.force()) {
+ (Internal(edge), Internal(node)) => {
+ left_node = edge.descend();
+ right_node = node.first_edge().descend();
+ }
+ (Leaf(_), Leaf(_)) => break,
+ _ => unreachable!(),
+ }
+ }
+
+ left_root.fix_right_border(alloc.clone());
+ right_root.fix_left_border(alloc);
+ right_root
+ }
+
+ /// Creates a tree consisting of empty nodes.
+ fn new_pillar<A: Allocator + Clone>(height: usize, alloc: A) -> Self {
+ let mut root = Root::new(alloc.clone());
+ for _ in 0..height {
+ root.push_internal_level(alloc.clone());
+ }
+ root
+ }
+}
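+
+// Hypothetical usage sketch at the public `BTreeMap` level (which drives
+// `Root::split_off`): every pair at or after the given key moves into the
+// returned tree. The test name is illustrative, not part of this file.
+#[cfg(test)]
+#[test]
+fn split_off_semantics_sketch() {
+ use crate::collections::BTreeMap;
+ let mut left = BTreeMap::from([(1, "a"), (3, "b"), (5, "c")]);
+ let right = left.split_off(&3);
+ assert!(left.keys().eq(&[1]));
+ assert!(right.keys().eq(&[3, 5]));
+}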
diff --git a/library/alloc/src/collections/btree/testing/crash_test.rs b/library/alloc/src/collections/btree/testing/crash_test.rs
new file mode 100644
index 000000000..bcf5f5f72
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/crash_test.rs
@@ -0,0 +1,119 @@
+// We avoid relying on anything else in the crate, apart from the `Debug` trait.
+use crate::fmt::Debug;
+use std::cmp::Ordering;
+use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+
+/// A blueprint for crash test dummy instances that monitor particular events.
+/// Some instances may be configured to panic at some point.
+/// Events are `clone`, `drop` or some anonymous `query`.
+///
+/// Crash test dummies are identified and ordered by an id, so they can be used
+/// as keys in a BTreeMap.
+#[derive(Debug)]
+pub struct CrashTestDummy {
+ pub id: usize,
+ cloned: AtomicUsize,
+ dropped: AtomicUsize,
+ queried: AtomicUsize,
+}
+
+impl CrashTestDummy {
+ /// Creates a crash test dummy blueprint. The `id` determines order and equality of instances.
+ pub fn new(id: usize) -> CrashTestDummy {
+ CrashTestDummy {
+ id,
+ cloned: AtomicUsize::new(0),
+ dropped: AtomicUsize::new(0),
+ queried: AtomicUsize::new(0),
+ }
+ }
+
+ /// Creates an instance of a crash test dummy that records what events it experiences
+ /// and optionally panics.
+ pub fn spawn(&self, panic: Panic) -> Instance<'_> {
+ Instance { origin: self, panic }
+ }
+
+ /// Returns how many times instances of the dummy have been cloned.
+ pub fn cloned(&self) -> usize {
+ self.cloned.load(SeqCst)
+ }
+
+ /// Returns how many times instances of the dummy have been dropped.
+ pub fn dropped(&self) -> usize {
+ self.dropped.load(SeqCst)
+ }
+
+ /// Returns how many times instances of the dummy have had their `query` member invoked.
+ pub fn queried(&self) -> usize {
+ self.queried.load(SeqCst)
+ }
+}
+
+#[derive(Debug)]
+pub struct Instance<'a> {
+ origin: &'a CrashTestDummy,
+ panic: Panic,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Panic {
+ Never,
+ InClone,
+ InDrop,
+ InQuery,
+}
+
+impl Instance<'_> {
+ pub fn id(&self) -> usize {
+ self.origin.id
+ }
+
+ /// Some anonymous query, the result of which is already given.
+ pub fn query<R>(&self, result: R) -> R {
+ self.origin.queried.fetch_add(1, SeqCst);
+ if self.panic == Panic::InQuery {
+ panic!("panic in `query`");
+ }
+ result
+ }
+}
+
+impl Clone for Instance<'_> {
+ fn clone(&self) -> Self {
+ self.origin.cloned.fetch_add(1, SeqCst);
+ if self.panic == Panic::InClone {
+ panic!("panic in `clone`");
+ }
+ Self { origin: self.origin, panic: Panic::Never }
+ }
+}
+
+impl Drop for Instance<'_> {
+ fn drop(&mut self) {
+ self.origin.dropped.fetch_add(1, SeqCst);
+ if self.panic == Panic::InDrop {
+ panic!("panic in `drop`");
+ }
+ }
+}
+
+impl PartialOrd for Instance<'_> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.id().partial_cmp(&other.id())
+ }
+}
+
+impl Ord for Instance<'_> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.id().cmp(&other.id())
+ }
+}
+
+impl PartialEq for Instance<'_> {
+ fn eq(&self, other: &Self) -> bool {
+ self.id().eq(&other.id())
+ }
+}
+
+impl Eq for Instance<'_> {}
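+
+// Usage sketch (illustrative; the test name is hypothetical): instances
+// spawned from one blueprint report their events to its shared counters.
+#[cfg(test)]
+#[test]
+fn crash_test_dummy_sketch() {
+ let dummy = CrashTestDummy::new(0);
+ {
+ let a = dummy.spawn(Panic::Never);
+ let _b = a.clone();
+ assert_eq!(a.query(42), 42);
+ }
+ assert_eq!(dummy.cloned(), 1);
+ assert_eq!(dummy.queried(), 1);
+ assert_eq!(dummy.dropped(), 2);
+}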
diff --git a/library/alloc/src/collections/btree/testing/mod.rs b/library/alloc/src/collections/btree/testing/mod.rs
new file mode 100644
index 000000000..7a094f8a5
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/mod.rs
@@ -0,0 +1,3 @@
+pub mod crash_test;
+pub mod ord_chaos;
+pub mod rng;
diff --git a/library/alloc/src/collections/btree/testing/ord_chaos.rs b/library/alloc/src/collections/btree/testing/ord_chaos.rs
new file mode 100644
index 000000000..96ce7c157
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/ord_chaos.rs
@@ -0,0 +1,81 @@
+use std::cell::Cell;
+use std::cmp::Ordering::{self, *};
+use std::ptr;
+
+// Minimal type with an `Ord` implementation violating transitivity.
+#[derive(Debug)]
+pub enum Cyclic3 {
+ A,
+ B,
+ C,
+}
+use Cyclic3::*;
+
+impl PartialOrd for Cyclic3 {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for Cyclic3 {
+ fn cmp(&self, other: &Self) -> Ordering {
+ match (self, other) {
+ (A, A) | (B, B) | (C, C) => Equal,
+ (A, B) | (B, C) | (C, A) => Less,
+ (A, C) | (B, A) | (C, B) => Greater,
+ }
+ }
+}
+
+impl PartialEq for Cyclic3 {
+ fn eq(&self, other: &Self) -> bool {
+ self.cmp(other) == Equal
+ }
+}
+
+impl Eq for Cyclic3 {}
+
+// Controls the ordering of values wrapped by `Governed`.
+#[derive(Debug)]
+pub struct Governor {
+ flipped: Cell<bool>,
+}
+
+impl Governor {
+ pub fn new() -> Self {
+ Governor { flipped: Cell::new(false) }
+ }
+
+ pub fn flip(&self) {
+ self.flipped.set(!self.flipped.get());
+ }
+}
+
+// Type with an `Ord` implementation that forms a total order at any moment
+// (assuming that `T` respects total order), but can suddenly be made to invert
+// that total order.
+#[derive(Debug)]
+pub struct Governed<'a, T>(pub T, pub &'a Governor);
+
+impl<T: Ord> PartialOrd for Governed<'_, T> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl<T: Ord> Ord for Governed<'_, T> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ assert!(ptr::eq(self.1, other.1));
+ let ord = self.0.cmp(&other.0);
+ if self.1.flipped.get() { ord.reverse() } else { ord }
+ }
+}
+
+impl<T: PartialEq> PartialEq for Governed<'_, T> {
+ fn eq(&self, other: &Self) -> bool {
+ assert!(ptr::eq(self.1, other.1));
+ self.0.eq(&other.0)
+ }
+}
+
+impl<T: Eq> Eq for Governed<'_, T> {}
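+
+// Illustrative sketch (the test name is hypothetical): the cycle
+// A < B < C < A is exactly what breaks transitivity.
+#[cfg(test)]
+#[test]
+fn cyclic3_breaks_transitivity() {
+ assert_eq!(A.cmp(&B), Less);
+ assert_eq!(B.cmp(&C), Less);
+ assert_eq!(C.cmp(&A), Less); // a transitive order would require Greater here
+}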
diff --git a/library/alloc/src/collections/btree/testing/rng.rs b/library/alloc/src/collections/btree/testing/rng.rs
new file mode 100644
index 000000000..ecf543bee
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/rng.rs
@@ -0,0 +1,28 @@
+/// A deterministic xorshift128 pseudo-random number generator.
+pub struct DeterministicRng {
+ count: usize,
+ x: u32,
+ y: u32,
+ z: u32,
+ w: u32,
+}
+
+impl DeterministicRng {
+ pub fn new() -> Self {
+ DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
+ }
+
+ /// Guarantees that each returned number is unique, within the number of
+ /// calls bounded by the assert below.
+ pub fn next(&mut self) -> u32 {
+ self.count += 1;
+ assert!(self.count <= 70029);
+ let x = self.x;
+ let t = x ^ (x << 11);
+ self.x = self.y;
+ self.y = self.z;
+ self.z = self.w;
+ let w_ = self.w;
+ self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
+ self.w
+ }
+}
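+
+// Usage sketch (illustrative; the test name is hypothetical): the generator
+// is fully deterministic, so two fresh instances yield identical streams.
+#[cfg(test)]
+#[test]
+fn deterministic_rng_sketch() {
+ let mut a = DeterministicRng::new();
+ let mut b = DeterministicRng::new();
+ for _ in 0..10 {
+ assert_eq!(a.next(), b.next());
+ }
+}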