author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:47:29 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:47:29 +0000
commit    0ebf5bdf043a27fd3dfb7f92e0cb63d88954c44d (patch)
tree      a31f07c9bcca9d56ce61e9a1ffd30ef350d513aa /third_party/rust/fallible_collections
parent    Initial commit. (diff)
Adding upstream version 115.8.0esr. (tag: upstream/115.8.0esr)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/fallible_collections')
-rw-r--r--  third_party/rust/fallible_collections/.cargo-checksum.json |    1
-rw-r--r--  third_party/rust/fallible_collections/Cargo.toml           |   33
-rw-r--r--  third_party/rust/fallible_collections/LICENSE-APACHE       |  201
-rw-r--r--  third_party/rust/fallible_collections/LICENSE-MIT          |   25
-rw-r--r--  third_party/rust/fallible_collections/README.md            |   77
-rw-r--r--  third_party/rust/fallible_collections/src/arc.rs           |   52
-rw-r--r--  third_party/rust/fallible_collections/src/boxed.rs         |  151
-rw-r--r--  third_party/rust/fallible_collections/src/btree.rs         |   20
-rw-r--r--  third_party/rust/fallible_collections/src/btree/map.rs     | 2684
-rw-r--r--  third_party/rust/fallible_collections/src/btree/node.rs    | 1676
-rw-r--r--  third_party/rust/fallible_collections/src/btree/search.rs  |   66
-rw-r--r--  third_party/rust/fallible_collections/src/btree/set.rs     | 1346
-rw-r--r--  third_party/rust/fallible_collections/src/format.rs        |   46
-rw-r--r--  third_party/rust/fallible_collections/src/hashmap.rs       |  139
-rw-r--r--  third_party/rust/fallible_collections/src/lib.rs           |   91
-rw-r--r--  third_party/rust/fallible_collections/src/rc.rs            |   35
-rw-r--r--  third_party/rust/fallible_collections/src/try_clone.rs     |   39
-rw-r--r--  third_party/rust/fallible_collections/src/vec.rs           |  968
18 files changed, 7650 insertions, 0 deletions
diff --git a/third_party/rust/fallible_collections/.cargo-checksum.json b/third_party/rust/fallible_collections/.cargo-checksum.json
new file mode 100644
index 0000000000..7e41b87cf9
--- /dev/null
+++ b/third_party/rust/fallible_collections/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"8ecacb7ad2f59391ae3247157c01c6d6508095f53ba466c3e3b554891b3e941f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"5b817b980bb39f4bee82913daf9d1ef39d1cb9e790b85ab000f735a962ce596d","src/arc.rs":"3cf237ae0acb5b058a57b633170f079024455271e9420e5a9244bafbdeb90b1c","src/boxed.rs":"1f19ad48ab3a1f41cca3756f3fdbc22e97f50a9121511222afcfe1859faf50fa","src/btree.rs":"b83820fc2a00e2e34127b3037abde8b945f0ca2785f3def725787e6813c3d3e0","src/btree/map.rs":"557ce3ff2d02c425adcb2b4ac53b6b6607c25c535aee8ffa4f12bf773fbcd763","src/btree/node.rs":"d943949b8938b5888245d6560efd386c6ae78d23fc3a7a0cc5b06f4da8f4c1c1","src/btree/search.rs":"ae78f73f3e56ea277b0a02cc39454447b75e12a6c817ecfee00065b3ddbfff67","src/btree/set.rs":"607f0db0b189c39b41824fbbf6fd8d9c5fdf85cc40f4437b13152e7b86d2979f","src/format.rs":"5142970f6ac1fe66f667ee2565af786802e93e6728ec3a1b82ffaa9f6a6b5bce","src/hashmap.rs":"1b9bf03fd2f2d9412ea2dad6963e1d37d51662e7091424bfcdc44a502f4e64bc","src/lib.rs":"652532126fdc2a81a927d23e4e4ad810911ee5d398f35f82650b5b4ec9fc5576","src/rc.rs":"f327a0adcfd2b1e225913ae716deb96777ca562985ac64e3b83550111f809864","src/try_clone.rs":"725130e0ddacde1ff7c976de62fbe45d01c67412af395aa41cac4bcfb85f6a5f","src/vec.rs":"27369a12a66deff0fe3fc57eae0f3a639e98b968d92e10eff9d4b7af8354a9d4"},"package":"3f57ccc32870366ae684be48b32a1a2e196f98a42a9b4361fe77e13fd4a34755"} \ No newline at end of file
diff --git a/third_party/rust/fallible_collections/Cargo.toml b/third_party/rust/fallible_collections/Cargo.toml
new file mode 100644
index 0000000000..27a8221a17
--- /dev/null
+++ b/third_party/rust/fallible_collections/Cargo.toml
@@ -0,0 +1,33 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "fallible_collections"
+version = "0.4.6"
+authors = ["vcombey <vcombey@student.42.fr>"]
+description = "a crate which adds fallible allocation api to std collections"
+readme = "README.md"
+keywords = [
+ "fallible",
+ "collections",
+]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/vcombey/fallible_collections.git"
+
+[dependencies.hashbrown]
+version = "0.12.1"
+
+[features]
+rust_1_57 = []
+std = []
+std_io = ["std"]
+unstable = []
diff --git a/third_party/rust/fallible_collections/LICENSE-APACHE b/third_party/rust/fallible_collections/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/third_party/rust/fallible_collections/LICENSE-APACHE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/third_party/rust/fallible_collections/LICENSE-MIT b/third_party/rust/fallible_collections/LICENSE-MIT
new file mode 100644
index 0000000000..25597d5838
--- /dev/null
+++ b/third_party/rust/fallible_collections/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2010 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/third_party/rust/fallible_collections/README.md b/third_party/rust/fallible_collections/README.md
new file mode 100644
index 0000000000..b34825903d
--- /dev/null
+++ b/third_party/rust/fallible_collections/README.md
@@ -0,0 +1,77 @@
+Fallible Collections.rs
+==============
+
+Implements an API on Rust collections which returns a `Result` when an allocation error occurs.
+This is heavily inspired by [RFC 2116](https://github.com/rust-lang/rfcs/blob/master/text/2116-alloc-me-maybe.md).
+
+The API currently proposes a fallible interface for Vec, Box, Arc, Btree and Rc,
+a TryClone trait which is implemented for primitive Rust types, and a fallible format macro.
+
+You can use this with the try_clone_derive crate, which derives TryClone for your own types.
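+
+A minimal sketch of deriving it (assuming `try_clone_derive` exports a
+`TryClone` derive macro and that the field types implement `TryClone`):
+
+```rust
+use fallible_collections::TryClone; // the trait
+use try_clone_derive::TryClone;     // the derive macro (assumed path)
+
+#[derive(TryClone)]
+struct Point {
+    x: u32,
+    y: u32,
+}
+```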
+
+# Getting Started
+
+[fallible collections is available on crates.io](https://crates.io/crates/fallible_collections).
+It is recommended to look there for the newest released version, as well as links to the newest builds of the docs.
+
+At the point of the last update of this README, the latest published version could be used like this:
+
+Add the following dependency to your Cargo manifest...
+Add the `std` and `rust_1_57` features to use the stabilized `try_reserve` API and the std `HashMap` type.
+
+```toml
+[dependencies]
+fallible_collections = "0.4"
+
+# or
+fallible_collections = { version = "0.4", features = ["std", "rust_1_57"] }
+```
+
+...and see the [docs](https://docs.rs/fallible_collections) for how to use it.
+
+# Example
+
+Example of using the `FallibleBox` interface.
+```rust
+use fallible_collections::FallibleBox;
+
+fn main() {
+    // this creates an ordinary Box but returns an error on allocation failure
+    let mut a = <Box<_> as FallibleBox<_>>::try_new(5).unwrap();
+    let b = Box::new(5);
+
+ assert_eq!(a, b);
+ *a = 3;
+ assert_eq!(*a, 3);
+}
+```
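+
+The error can also be propagated with `?` instead of unwrapped; a minimal
+sketch using only the `FallibleBox` API shown above:
+
+```rust
+use fallible_collections::{FallibleBox, TryReserveError};
+
+fn boxed_five() -> Result<Box<i32>, TryReserveError> {
+    // On allocation failure this returns Err(TryReserveError) instead of aborting.
+    let b = <Box<i32> as FallibleBox<i32>>::try_new(5)?;
+    Ok(b)
+}
+```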
+
+Example of using the `FallibleVec` interface.
+```rust
+use fallible_collections::FallibleVec;
+
+fn main() {
+    // this creates an ordinary Vec<Vec<u8>> but returns an error on allocation failure
+ let a: Vec<Vec<u8>> = try_vec![try_vec![42; 10].unwrap(); 100].unwrap();
+ let b: Vec<Vec<u8>> = vec![vec![42; 10]; 100];
+ assert_eq!(a, b);
+ assert_eq!(a.try_clone().unwrap(), a);
+ ...
+}
+```
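+
+Allocation failure can also be handled per operation; a minimal sketch,
+assuming the `try_with_capacity` and `try_push` methods of the `FallibleVec`
+trait:
+
+```rust
+use fallible_collections::{FallibleVec, TryReserveError};
+
+fn squares(n: u64) -> Result<Vec<u64>, TryReserveError> {
+    // Reserve up front so the pushes below cannot fail for lack of capacity.
+    let mut v: Vec<u64> = FallibleVec::try_with_capacity(n as usize)?;
+    for i in 0..n {
+        v.try_push(i * i)?; // surfaces OOM as an error instead of aborting
+    }
+    Ok(v)
+}
+```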
+
+## License
+
+Licensed under either of
+
+ * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
+additional terms or conditions.
+
diff --git a/third_party/rust/fallible_collections/src/arc.rs b/third_party/rust/fallible_collections/src/arc.rs
new file mode 100644
index 0000000000..282b8e5555
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/arc.rs
@@ -0,0 +1,52 @@
+//! Implement a Fallible Arc
+use super::FallibleBox;
+use super::TryClone;
+
+use crate::TryReserveError;
+use alloc::boxed::Box;
+use alloc::sync::Arc;
+
+/// trait to implement Fallible Arc
+#[deprecated(
+ since = "0.3.1",
+    note = "⚠️ this function is not completely fallible: it can panic! See [issue](https://github.com/vcombey/fallible_collections/issues/13); help wanted"
+)]
+pub trait FallibleArc<T> {
+    /// try creating a new Arc, returning a Result<Arc<T>,
+    /// TryReserveError> if allocation failed
+ fn try_new(t: T) -> Result<Self, TryReserveError>
+ where
+ Self: Sized;
+}
+
+#[allow(deprecated)]
+impl<T> FallibleArc<T> for Arc<T> {
+ fn try_new(t: T) -> Result<Self, TryReserveError> {
+        // not fully fallible: Arc::from(Box) re-allocates to store the Arc's
+        // inner variables (the reference counts) next to the data, and that
+        // allocation can still panic
+
+ let b = <Box<T> as FallibleBox<T>>::try_new(t)?;
+ Ok(Arc::from(b))
+ }
+}
+
+/// Just a TryClone boilerplate for Arc
+impl<T: ?Sized> TryClone for Arc<T> {
+ fn try_clone(&self) -> Result<Self, TryReserveError> {
+ Ok(self.clone())
+ }
+}
+
+#[cfg(test)]
+mod test {
+ #[test]
+ fn fallible_rc() {
+ use std::sync::Arc;
+
+ let mut x = Arc::new(3);
+ *Arc::get_mut(&mut x).unwrap() = 4;
+ assert_eq!(*x, 4);
+
+ let _y = Arc::clone(&x);
+ assert!(Arc::get_mut(&mut x).is_none());
+ }
+}
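+
+// Illustrative sketch (not part of upstream): the inner Box allocation in
+// FallibleArc::try_new is fallible, but Arc::from(Box) may allocate again and
+// panic, which is why the trait is deprecated above.
+#[cfg(test)]
+mod usage_sketch {
+    #[allow(deprecated)]
+    #[test]
+    fn fallible_arc_try_new() {
+        use super::FallibleArc;
+        use std::sync::Arc;
+
+        let a = <Arc<u32> as FallibleArc<u32>>::try_new(7).unwrap();
+        assert_eq!(*a, 7);
+    }
+}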
diff --git a/third_party/rust/fallible_collections/src/boxed.rs b/third_party/rust/fallible_collections/src/boxed.rs
new file mode 100644
index 0000000000..6040754716
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/boxed.rs
@@ -0,0 +1,151 @@
+//! Implement Fallible Box
+use super::TryClone;
+use crate::TryReserveError;
+use alloc::alloc::Layout;
+use alloc::boxed::Box;
+use core::borrow::Borrow;
+use core::ops::Deref;
+use core::ptr::NonNull;
+
+/// trait to implement Fallible Box
+pub trait FallibleBox<T> {
+ /// try creating a new box, returning a Result<Box<T>,
+ /// TryReserveError> if allocation failed
+ fn try_new(t: T) -> Result<Self, TryReserveError>
+ where
+ Self: Sized;
+}
+/// TryBox is a thin wrapper around alloc::boxed::Box to provide support for
+/// fallible allocation.
+///
+/// See the crate documentation for more.
+pub struct TryBox<T> {
+ inner: Box<T>,
+}
+
+impl<T> TryBox<T> {
+ #[inline]
+ pub fn try_new(t: T) -> Result<Self, TryReserveError> {
+ Ok(Self {
+ inner: <Box<T> as FallibleBox<T>>::try_new(t)?,
+ })
+ }
+
+ #[inline(always)]
+ pub fn into_raw(b: TryBox<T>) -> *mut T {
+ Box::into_raw(b.inner)
+ }
+
+ /// # Safety
+ ///
+    /// See `std::boxed::Box::from_raw`
+ #[inline(always)]
+ pub unsafe fn from_raw(raw: *mut T) -> Self {
+ Self {
+ inner: Box::from_raw(raw),
+ }
+ }
+}
+
+impl<T: TryClone> TryClone for TryBox<T> {
+ fn try_clone(&self) -> Result<Self, TryReserveError> {
+ let clone: T = (*self.inner).try_clone()?;
+ Self::try_new(clone)
+ }
+}
+
+impl<T> Deref for TryBox<T> {
+ type Target = T;
+
+ #[inline(always)]
+ fn deref(&self) -> &T {
+ self.inner.deref()
+ }
+}
+
+fn alloc(layout: Layout) -> Result<NonNull<u8>, TryReserveError> {
+ #[cfg(feature = "unstable")] // requires allocator_api
+ {
+ use core::alloc::Allocator;
+ alloc::alloc::Global
+ .allocate(layout)
+ .map_err(|_e| TryReserveError::AllocError {
+ layout,
+ #[cfg(not(feature = "rust_1_57"))]
+ non_exhaustive: (),
+ })
+ .map(|v| v.cast())
+ }
+ #[cfg(not(feature = "unstable"))]
+ {
+ match layout.size() {
+ 0 => {
+ // Required for alloc safety
+ // See https://doc.rust-lang.org/stable/std/alloc/trait.GlobalAlloc.html#safety-1
+ Ok(NonNull::dangling())
+ }
+ 1..=core::usize::MAX => {
+ let ptr = unsafe { alloc::alloc::alloc(layout) };
+ core::ptr::NonNull::new(ptr).ok_or(TryReserveError::AllocError { layout })
+ }
+ _ => unreachable!("size must be non-negative"),
+ }
+ }
+}
+
+impl<T> FallibleBox<T> for Box<T> {
+ fn try_new(t: T) -> Result<Self, TryReserveError> {
+ let layout = Layout::for_value(&t);
+ let ptr = alloc(layout)?.as_ptr() as *mut T;
+ unsafe {
+ core::ptr::write(ptr, t);
+ Ok(Box::from_raw(ptr))
+ }
+ }
+}
+
+impl<T: TryClone> TryClone for Box<T> {
+ #[inline]
+ fn try_clone(&self) -> Result<Self, TryReserveError> {
+ <Self as FallibleBox<T>>::try_new(Borrow::<T>::borrow(self).try_clone()?)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ #[test]
+ fn boxed() {
+ let mut v = <Box<_> as FallibleBox<_>>::try_new(5).unwrap();
+ assert_eq!(*v, 5);
+ *v = 3;
+ assert_eq!(*v, 3);
+ }
+ // #[test]
+ // fn big_alloc() {
+ // let layout = Layout::from_size_align(1_000_000_000_000, 8).unwrap();
+ // let ptr = unsafe { alloc::alloc::alloc(layout) };
+ // assert!(ptr.is_null());
+ // }
+
+ #[test]
+ fn trybox_zst() {
+ let b = <Box<_> as FallibleBox<_>>::try_new(()).expect("ok");
+ assert_eq!(b, Box::new(()));
+ }
+
+ struct NonCopyType;
+
+ #[test]
+ fn trybox_deref() {
+ let try_box: TryBox<NonCopyType> = TryBox::try_new(NonCopyType {}).unwrap();
+ let _derefed: &NonCopyType = try_box.deref();
+ }
+
+ #[test]
+ fn trybox_as_deref() {
+ let try_box_option: Option<TryBox<NonCopyType>> =
+ Some(TryBox::try_new(NonCopyType).unwrap());
+ let _ref_option: Option<&NonCopyType> = try_box_option.as_deref();
+ }
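+
+    // Illustrative sketch (not upstream): round-trip through raw pointers,
+    // exercising the Safety contract documented on `from_raw`.
+    #[test]
+    fn trybox_raw_round_trip() {
+        let b = TryBox::try_new(7i32).unwrap();
+        let raw = TryBox::into_raw(b);
+        // Safety: `raw` was just produced by `TryBox::into_raw` above.
+        let b = unsafe { TryBox::from_raw(raw) };
+        assert_eq!(*b, 7);
+    }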
+}
diff --git a/third_party/rust/fallible_collections/src/btree.rs b/third_party/rust/fallible_collections/src/btree.rs
new file mode 100644
index 0000000000..63d8c6bcf5
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/btree.rs
@@ -0,0 +1,20 @@
+//! Implement a Fallible BTree. As there are no try_reserve methods on the std BTree, I had no choice but to fork the std implementation and change the return types.
+//! Currently this functionality is only available when building this crate with nightly and the `unstable` feature.
+pub mod map;
+pub use map::BTreeMap;
+
+pub mod set;
+pub use set::BTreeSet;
+
+mod node;
+mod search;
+use crate::TryReserveError;
+
+#[doc(hidden)]
+trait Recover<Q: ?Sized> {
+ type Key;
+
+ fn get(&self, key: &Q) -> Option<&Self::Key>;
+ fn take(&mut self, key: &Q) -> Option<Self::Key>;
+ fn replace(&mut self, key: Self::Key) -> Result<Option<Self::Key>, TryReserveError>;
+}
diff --git a/third_party/rust/fallible_collections/src/btree/map.rs b/third_party/rust/fallible_collections/src/btree/map.rs
new file mode 100644
index 0000000000..3a69a6679f
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/btree/map.rs
@@ -0,0 +1,2684 @@
+use crate::TryReserveError;
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+use core::fmt::Debug;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::marker::PhantomData;
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, RangeBounds};
+use core::{fmt, intrinsics, mem, ptr};
+
+use super::node::{self, marker, ForceResult::*, Handle, InsertResult::*, NodeRef};
+use super::search::{self, SearchResult::*};
+
+use Entry::*;
+use UnderflowResult::*;
+
+/// A map based on a B-Tree.
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
+/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of
+/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
+/// is done is *very* inefficient for modern computer architectures. In particular, every element
+/// is stored in its own individually heap-allocated node. This means that every single insertion
+/// triggers a heap-allocation, and every single comparison should be a cache-miss. Since these
+/// are both notably expensive things to do in practice, we are forced to at very least reconsider
+/// the BST strategy.
+///
+/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
+/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
+/// searches. However, this does mean that searches will have to do *more* comparisons on average.
+/// The precise number of comparisons depends on the node search strategy used. For optimal cache
+/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
+/// the node using binary search. As a compromise, one could also perform a linear search
+/// that initially only checks every i<sup>th</sup> element for some choice of i.
+///
+/// Currently, our implementation simply performs naive linear search. This provides excellent
+/// performance on *small* nodes of elements which are cheap to compare. However in the future we
+/// would like to further explore choosing the optimal search strategy based on the choice of B,
+/// and possibly other factors. Using linear search, searching for a random element is expected
+/// to take O(B log<sub>B</sub>n) comparisons, which is generally worse than a BST. In practice,
+/// however, performance is excellent.
+///
+/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
+/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`Ord`]: ../../std/cmp/trait.Ord.html
+/// [`Cell`]: ../../std/cell/struct.Cell.html
+/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, &str>` in this example).
+/// let mut movie_reviews = BTreeMap::new();
+///
+/// // review some movies.
+/// movie_reviews.insert("Office Space", "Deals with real issues in the workplace.");
+/// movie_reviews.insert("Pulp Fiction", "Masterpiece.");
+/// movie_reviews.insert("The Godfather", "Very enjoyable.");
+/// movie_reviews.insert("The Blues Brothers", "Eye lyked it a lot.");
+///
+/// // check for a specific one.
+/// if !movie_reviews.contains_key("Les Misérables") {
+/// println!("We've got {} reviews, but Les Misérables ain't one.",
+/// movie_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// movie_reviews.remove("The Blues Brothers");
+///
+/// // look up the values associated with some keys.
+/// let to_find = ["Up!", "Office Space"];
+/// for book in &to_find {
+/// match movie_reviews.get(book) {
+/// Some(review) => println!("{}: {}", book, review),
+/// None => println!("{} is unreviewed.", book)
+/// }
+/// }
+///
+/// // Look up the value for a key (will panic if the key is not found).
+/// println!("Movie review: {}", movie_reviews["Office Space"]);
+///
+/// // iterate over everything.
+/// for (movie, review) in &movie_reviews {
+/// println!("{}: \"{}\"", movie, review);
+/// }
+/// ```
+///
+/// `BTreeMap` also implements an [`Entry API`](#method.entry), which allows
+/// for more complex methods of getting, setting, updating and removing keys and
+/// their values:
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, u8>` in this example).
+/// let mut player_stats = BTreeMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+/// // could actually return some random value here - let's just return
+/// // some fixed value for now
+/// 42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+/// ```
+
+pub struct BTreeMap<K, V> {
+ root: node::Root<K, V>,
+ length: usize,
+}
+
+unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
+ fn drop(&mut self) {
+ unsafe {
+ drop(ptr::read(self).into_iter());
+ }
+ }
+}
+
+use crate::TryClone;
+
+impl<K: TryClone, V: TryClone> TryClone for BTreeMap<K, V> {
+ fn try_clone(&self) -> Result<BTreeMap<K, V>, TryReserveError> {
+ fn clone_subtree<'a, K: TryClone, V: TryClone>(
+ node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+ ) -> Result<BTreeMap<K, V>, TryReserveError>
+ where
+ K: 'a,
+ V: 'a,
+ {
+ match node.force() {
+ Leaf(leaf) => {
+ let mut out_tree = BTreeMap {
+ root: node::Root::new_leaf()?,
+ length: 0,
+ };
+
+ {
+ let mut out_node = match out_tree.root.as_mut().force() {
+ Leaf(leaf) => leaf,
+ Internal(_) => unreachable!(),
+ };
+
+ let mut in_edge = leaf.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ out_node.push(k.try_clone()?, v.try_clone()?);
+ out_tree.length += 1;
+ }
+ }
+
+ Ok(out_tree)
+ }
+ Internal(internal) => {
+ let mut out_tree = clone_subtree(internal.first_edge().descend())?;
+
+ {
+ let mut out_node = out_tree.root.push_level()?;
+ let mut in_edge = internal.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ let k = (*k).try_clone()?;
+ let v = (*v).try_clone()?;
+ let subtree = clone_subtree(in_edge.descend())?;
+
+ // We can't destructure subtree directly
+ // because BTreeMap implements Drop
+ let (subroot, sublength) = unsafe {
+ let root = ptr::read(&subtree.root);
+ let length = subtree.length;
+ mem::forget(subtree);
+ (root, length)
+ };
+
+ out_node.push(k, v, subroot);
+ out_tree.length += 1 + sublength;
+ }
+ }
+
+ Ok(out_tree)
+ }
+ }
+ }
+
+ if self.len() == 0 {
+ // Ideally we'd call `BTreeMap::new` here, but that has the `K:
+ // Ord` constraint, which this method lacks.
+ Ok(BTreeMap {
+ root: node::Root::shared_empty_root(),
+ length: 0,
+ })
+ } else {
+ clone_subtree(self.root.as_ref())
+ }
+ }
+}
+
+impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
+ fn clone(&self) -> BTreeMap<K, V> {
+ fn clone_subtree<'a, K: Clone, V: Clone>(
+ node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+ ) -> BTreeMap<K, V>
+ where
+ K: 'a,
+ V: 'a,
+ {
+ match node.force() {
+ Leaf(leaf) => {
+ let mut out_tree = BTreeMap {
+ root: node::Root::new_leaf().expect("Out of Mem"),
+ length: 0,
+ };
+
+ {
+ let mut out_node = match out_tree.root.as_mut().force() {
+ Leaf(leaf) => leaf,
+ Internal(_) => unreachable!(),
+ };
+
+ let mut in_edge = leaf.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ out_node.push(k.clone(), v.clone());
+ out_tree.length += 1;
+ }
+ }
+
+ out_tree
+ }
+ Internal(internal) => {
+ let mut out_tree = clone_subtree(internal.first_edge().descend());
+
+ {
+ let mut out_node = out_tree.root.push_level().expect("Out of Mem");
+ let mut in_edge = internal.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ let k = (*k).clone();
+ let v = (*v).clone();
+ let subtree = clone_subtree(in_edge.descend());
+
+ // We can't destructure subtree directly
+ // because BTreeMap implements Drop
+ let (subroot, sublength) = unsafe {
+ let root = ptr::read(&subtree.root);
+ let length = subtree.length;
+ mem::forget(subtree);
+ (root, length)
+ };
+
+ out_node.push(k, v, subroot);
+ out_tree.length += 1 + sublength;
+ }
+ }
+
+ out_tree
+ }
+ }
+ }
+
+ if self.len() == 0 {
+ // Ideally we'd call `BTreeMap::new` here, but that has the `K:
+ // Ord` constraint, which this method lacks.
+ BTreeMap {
+ root: node::Root::shared_empty_root(),
+ length: 0,
+ }
+ } else {
+ clone_subtree(self.root.as_ref())
+ }
+ }
+}
+
+impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
+where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+{
+ type Key = K;
+
+ fn get(&self, key: &Q) -> Option<&K> {
+ match search::search_tree(self.root.as_ref(), key) {
+ Found(handle) => Some(handle.into_kv().0),
+ GoDown(_) => None,
+ }
+ }
+
+ fn take(&mut self, key: &Q) -> Option<K> {
+ match search::search_tree(self.root.as_mut(), key) {
+ Found(handle) => Some(
+ OccupiedEntry {
+ handle,
+ length: &mut self.length,
+ _marker: PhantomData,
+ }
+ .remove_kv()
+ .0,
+ ),
+ GoDown(_) => None,
+ }
+ }
+
+ fn replace(&mut self, key: K) -> Result<Option<K>, TryReserveError> {
+ self.ensure_root_is_owned()?;
+ match search::search_tree::<marker::Mut<'_>, K, (), K>(self.root.as_mut(), &key) {
+ Found(handle) => Ok(Some(mem::replace(handle.into_kv_mut().0, key))),
+ GoDown(handle) => {
+ VacantEntry {
+ key,
+ handle,
+ length: &mut self.length,
+ _marker: PhantomData,
+ }
+ .try_insert(())?;
+ Ok(None)
+ }
+ }
+ }
+}
+
+/// An iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.BTreeMap.html#method.iter
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+pub struct Iter<'a, K: 'a, V: 'a> {
+ range: Range<'a, K, V>,
+ length: usize,
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.BTreeMap.html#method.iter_mut
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+#[derive(Debug)]
+pub struct IterMut<'a, K: 'a, V: 'a> {
+ range: RangeMut<'a, K, V>,
+ length: usize,
+}
+
+/// An owning iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`][`BTreeMap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.BTreeMap.html#method.into_iter
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+pub struct IntoIter<K, V> {
+ front: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+ back: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+ length: usize,
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let range = Range {
+ front: self.front.reborrow(),
+ back: self.back.reborrow(),
+ };
+ f.debug_list().entries(range).finish()
+ }
+}
+
+/// An iterator over the keys of a `BTreeMap`.
+///
+/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: struct.BTreeMap.html#method.keys
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+pub struct Keys<'a, K: 'a, V: 'a> {
+ inner: Iter<'a, K, V>,
+}
+
+impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// An iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: struct.BTreeMap.html#method.values
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+pub struct Values<'a, K: 'a, V: 'a> {
+ inner: Iter<'a, K, V>,
+}
+
+impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: struct.BTreeMap.html#method.values_mut
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+#[derive(Debug)]
+pub struct ValuesMut<'a, K: 'a, V: 'a> {
+ inner: IterMut<'a, K, V>,
+}
+
+/// An iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range`]: struct.BTreeMap.html#method.range
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+pub struct Range<'a, K: 'a, V: 'a> {
+ front: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ back: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range_mut`]: struct.BTreeMap.html#method.range_mut
+/// [`BTreeMap`]: struct.BTreeMap.html
+
+pub struct RangeMut<'a, K: 'a, V: 'a> {
+ front: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ back: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let range = Range {
+ front: self.front.reborrow(),
+ back: self.back.reborrow(),
+ };
+ f.debug_list().entries(range).finish()
+ }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
+///
+/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`entry`]: struct.BTreeMap.html#method.entry
+
+pub enum Entry<'a, K: 'a, V: 'a> {
+ /// A vacant entry.
+ Vacant(VacantEntry<'a, K, V>),
+
+ /// An occupied entry.
+ Occupied(OccupiedEntry<'a, K, V>),
+}
+
+impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+ Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+ }
+ }
+}
+
+/// A view into a vacant entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+
+pub struct VacantEntry<'a, K: 'a, V: 'a> {
+ key: K,
+ handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ length: &'a mut usize,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("VacantEntry").field(self.key()).finish()
+ }
+}
+
+/// A view into an occupied entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+
+pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
+ handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
+
+ length: &'a mut usize,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedEntry")
+ .field("key", self.key())
+ .field("value", self.get())
+ .finish()
+ }
+}
+
+// An iterator for merging two sorted sequences into one
+struct MergeIter<K, V, I: Iterator<Item = (K, V)>> {
+ left: Peekable<I>,
+ right: Peekable<I>,
+}
+
+impl<K: Ord, V> BTreeMap<K, V> {
+ /// Makes a new empty BTreeMap with a reasonable choice for B.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ ///
+ /// // entries can now be inserted into the empty map
+ /// map.insert(1, "a");
+ /// ```
+
+ pub fn new() -> BTreeMap<K, V> {
+ BTreeMap {
+ root: node::Root::shared_empty_root(),
+ length: 0,
+ }
+ }
+
+ /// Clears the map, removing all values.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.clear();
+ /// assert!(a.is_empty());
+ /// ```
+
+ pub fn clear(&mut self) {
+ *self = BTreeMap::new();
+ }
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+
+ pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_ref(), key) {
+ Found(handle) => Some(handle.into_kv().1),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Returns the key-value pair corresponding to the supplied key.
+ ///
+ /// The supplied key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_get_key_value)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+ /// assert_eq!(map.get_key_value(&2), None);
+ /// ```
+ pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_ref(), k) {
+ Found(handle) => Some(handle.into_kv()),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Returns `true` if the map contains a value for the specified key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.contains_key(&1), true);
+ /// assert_eq!(map.contains_key(&2), false);
+ /// ```
+
+ #[inline]
+ pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ self.get(key).is_some()
+ }
+
+ /// Returns a mutable reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// if let Some(x) = map.get_mut(&1) {
+ /// *x = "b";
+ /// }
+ /// assert_eq!(map[&1], "b");
+ /// ```
+ // See `get` for implementation notes, this is basically a copy-paste with mut's added
+
+ pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_mut(), key) {
+ Found(handle) => Some(handle.into_kv_mut().1),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Inserts a key-value pair into the map.
+ ///
+ /// If the map did not have this key present, `None` is returned.
+ ///
+ /// If the map did have this key present, the value is updated, and the old
+ /// value is returned. The key is not updated, though; this matters for
+ /// types that can be `==` without being identical. See the [module-level
+ /// documentation] for more.
+ ///
+ /// [module-level documentation]: index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// assert_eq!(map.insert(37, "a"), None);
+ /// assert_eq!(map.is_empty(), false);
+ ///
+ /// map.insert(37, "b");
+ /// assert_eq!(map.insert(37, "c"), Some("b"));
+ /// assert_eq!(map[&37], "c");
+ /// ```
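+    ///
+    /// A hedged sketch (not upstream documentation) of this crate's fallible
+    /// variant, which reports allocation failure instead of aborting:
+    ///
+    /// ```ignore
+    /// use fallible_collections::btree::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// assert_eq!(map.try_insert(37, "a")?, None);
+    /// assert_eq!(map.try_insert(37, "b")?, Some("a"));
+    /// # Ok::<(), fallible_collections::TryReserveError>(())
+    /// ```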
+
+ pub fn try_insert(&mut self, key: K, value: V) -> Result<Option<V>, TryReserveError> {
+ match self.try_entry(key)? {
+ Occupied(mut entry) => Ok(Some(entry.insert(value))),
+ Vacant(entry) => {
+ entry.try_insert(value)?;
+ Ok(None)
+ }
+ }
+ }
+
+ /// Removes a key from the map, returning the value at the key if the key
+ /// was previously in the map.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.remove(&1), Some("a"));
+ /// assert_eq!(map.remove(&1), None);
+ /// ```
+
+ pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_mut(), key) {
+ Found(handle) => Some(
+ OccupiedEntry {
+ handle,
+ length: &mut self.length,
+ _marker: PhantomData,
+ }
+ .remove(),
+ ),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Moves all elements from `other` into `Self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ ///
+ /// let mut b = BTreeMap::new();
+ /// b.insert(3, "d");
+ /// b.insert(4, "e");
+ /// b.insert(5, "f");
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.len(), 5);
+ /// assert_eq!(b.len(), 0);
+ ///
+ /// assert_eq!(a[&1], "a");
+ /// assert_eq!(a[&2], "b");
+ /// assert_eq!(a[&3], "d");
+ /// assert_eq!(a[&4], "e");
+ /// assert_eq!(a[&5], "f");
+ /// ```
+
+ pub fn append(&mut self, other: &mut Self) {
+ // Do we have to append anything at all?
+ if other.len() == 0 {
+ return;
+ }
+
+ // We can just swap `self` and `other` if `self` is empty.
+ if self.len() == 0 {
+ mem::swap(self, other);
+ return;
+ }
+
+ // First, we merge `self` and `other` into a sorted sequence in linear time.
+ let self_iter = mem::replace(self, BTreeMap::new()).into_iter();
+ let other_iter = mem::replace(other, BTreeMap::new()).into_iter();
+ let iter = MergeIter {
+ left: self_iter.peekable(),
+ right: other_iter.peekable(),
+ };
+
+ // Second, we build a tree from the sorted sequence in linear time.
+ self.from_sorted_iter(iter);
+ self.fix_right_edge();
+ }
+
+ /// Constructs a double-ended iterator over a sub-range of elements in the map.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+ /// Panics if range `start > end`.
+ /// Panics if range `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound::Included;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(3, "a");
+ /// map.insert(5, "b");
+ /// map.insert(8, "c");
+ /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
+ /// println!("{}: {}", key, value);
+ /// }
+ /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
+ /// ```
+
+ pub fn range<T: ?Sized, R>(&self, range: R) -> Range<'_, K, V>
+ where
+ T: Ord,
+ K: Borrow<T>,
+ R: RangeBounds<T>,
+ {
+ let root1 = self.root.as_ref();
+ let root2 = self.root.as_ref();
+ let (f, b) = range_search(root1, root2, range);
+
+ Range { front: f, back: b }
+ }
+
+ /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+ /// Panics if range `start > end`.
+ /// Panics if range `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"]
+ /// .iter()
+ /// .map(|&s| (s, 0))
+ /// .collect();
+ /// for (_, balance) in map.range_mut("B".."Cheryl") {
+ /// *balance += 100;
+ /// }
+ /// for (name, balance) in &map {
+ /// println!("{} => {}", name, balance);
+ /// }
+ /// ```
+
+ pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<'_, K, V>
+ where
+ T: Ord,
+ K: Borrow<T>,
+ R: RangeBounds<T>,
+ {
+ let root1 = self.root.as_mut();
+ let root2 = unsafe { ptr::read(&root1) };
+ let (f, b) = range_search(root1, root2, range);
+
+ RangeMut {
+ front: f,
+ back: b,
+ _marker: PhantomData,
+ }
+ }
+
+ /// Gets the given key's corresponding entry in the map for in-place manipulation.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// // count the number of occurrences of letters in the vec
+ /// for x in vec!["a","b","a","c","a","b"] {
+ /// *count.entry(x).or_insert(0) += 1;
+ /// }
+ ///
+ /// assert_eq!(count["a"], 3);
+ /// ```
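+    ///
+    /// A hedged sketch (not upstream documentation) of this crate's fallible
+    /// variant; `VacantEntry::try_insert` covers the allocating arm:
+    ///
+    /// ```ignore
+    /// use fallible_collections::btree::map::{BTreeMap, Entry};
+    ///
+    /// let mut stats: BTreeMap<&str, u32> = BTreeMap::new();
+    /// match stats.try_entry("health")? {
+    ///     Entry::Vacant(v) => { v.try_insert(100)?; } // may allocate
+    ///     Entry::Occupied(_) => {}                    // no allocation needed
+    /// }
+    /// # Ok::<(), fallible_collections::TryReserveError>(())
+    /// ```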
+
+ pub fn try_entry(&mut self, key: K) -> Result<Entry<'_, K, V>, TryReserveError> {
+ // FIXME(@porglezomp) Avoid allocating if we don't insert
+ self.ensure_root_is_owned()?;
+ Ok(match search::search_tree(self.root.as_mut(), &key) {
+ Found(handle) => Occupied(OccupiedEntry {
+ handle,
+ length: &mut self.length,
+ _marker: PhantomData,
+ }),
+ GoDown(handle) => Vacant(VacantEntry {
+ key,
+ handle,
+ length: &mut self.length,
+ _marker: PhantomData,
+ }),
+ })
+ }
+
+ fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
+ self.ensure_root_is_owned().expect("Out Of Mem");
+ let mut cur_node = last_leaf_edge(self.root.as_mut()).into_node();
+ // Iterate through all key-value pairs, pushing them into nodes at the right level.
+ for (key, value) in iter {
+ // Try to push key-value pair into the current leaf node.
+ if cur_node.len() < node::CAPACITY {
+ cur_node.push(key, value);
+ } else {
+ // No space left, go up and push there.
+ let mut open_node;
+ let mut test_node = cur_node.forget_type();
+ loop {
+ match test_node.ascend() {
+ Ok(parent) => {
+ let parent = parent.into_node();
+ if parent.len() < node::CAPACITY {
+ // Found a node with space left, push here.
+ open_node = parent;
+ break;
+ } else {
+ // Go up again.
+ test_node = parent.forget_type();
+ }
+ }
+ Err(node) => {
+ // We are at the top, create a new root node and push there.
+ open_node = node.into_root_mut().push_level().expect("Out of Mem");
+ break;
+ }
+ }
+ }
+
+ // Push key-value pair and new right subtree.
+ let tree_height = open_node.height() - 1;
+ let mut right_tree = node::Root::new_leaf().expect("Out of Mem");
+ for _ in 0..tree_height {
+ right_tree.push_level().expect("Out of Mem");
+ }
+ open_node.push(key, value, right_tree);
+
+ // Go down to the right-most leaf again.
+ cur_node = last_leaf_edge(open_node.forget_type()).into_node();
+ }
+
+ self.length += 1;
+ }
+ }
+
+ fn fix_right_edge(&mut self) {
+ // Handle underfull nodes, start from the top.
+ let mut cur_node = self.root.as_mut();
+ while let Internal(internal) = cur_node.force() {
+ // Check if right-most child is underfull.
+ let mut last_edge = internal.last_edge();
+ let right_child_len = last_edge.reborrow().descend().len();
+ if right_child_len < node::MIN_LEN {
+ // We need to steal.
+ let mut last_kv = match last_edge.left_kv() {
+ Ok(left) => left,
+ Err(_) => unreachable!(),
+ };
+ last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
+ last_edge = last_kv.right_edge();
+ }
+
+ // Go further down.
+ cur_node = last_edge.descend();
+ }
+ }
+
+ /// Splits the collection into two at the given key. Returns everything after the given key,
+ /// including the key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(17, "d");
+ /// a.insert(41, "e");
+ ///
+ /// let b = a.split_off(&3);
+ ///
+ /// assert_eq!(a.len(), 2);
+ /// assert_eq!(b.len(), 3);
+ ///
+ /// assert_eq!(a[&1], "a");
+ /// assert_eq!(a[&2], "b");
+ ///
+ /// assert_eq!(b[&3], "c");
+ /// assert_eq!(b[&17], "d");
+ /// assert_eq!(b[&41], "e");
+ /// ```
+
+ pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Result<Self, TryReserveError>
+ where
+ K: Borrow<Q>,
+ {
+ if self.is_empty() {
+ return Ok(Self::new());
+ }
+
+ let total_num = self.len();
+
+ let mut right = Self::new();
+ right.root = node::Root::new_leaf()?;
+ for _ in 0..(self.root.as_ref().height()) {
+ right.root.push_level()?;
+ }
+
+ {
+ let mut left_node = self.root.as_mut();
+ let mut right_node = right.root.as_mut();
+
+ loop {
+ let mut split_edge = match search::search_node(left_node, key) {
+ // key is going to the right tree
+ Found(handle) => handle.left_edge(),
+ GoDown(handle) => handle,
+ };
+
+ split_edge.move_suffix(&mut right_node);
+
+ match (split_edge.force(), right_node.force()) {
+ (Internal(edge), Internal(node)) => {
+ left_node = edge.descend();
+ right_node = node.first_edge().descend();
+ }
+ (Leaf(_), Leaf(_)) => {
+ break;
+ }
+ _ => {
+ unreachable!();
+ }
+ }
+ }
+ }
+
+ self.fix_right_border();
+ right.fix_left_border();
+
+ if self.root.as_ref().height() < right.root.as_ref().height() {
+ self.recalc_length();
+ right.length = total_num - self.len();
+ } else {
+ right.recalc_length();
+ self.length = total_num - right.len();
+ }
+
+ Ok(right)
+ }
+
+ /// Recalculates the number of elements, for use when the cached `length` may be incorrect.
+ fn recalc_length(&mut self) {
+ fn dfs<'a, K, V>(node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>) -> usize
+ where
+ K: 'a,
+ V: 'a,
+ {
+ let mut res = node.len();
+
+ if let Internal(node) = node.force() {
+ let mut edge = node.first_edge();
+ loop {
+ res += dfs(edge.reborrow().descend());
+ match edge.right_kv() {
+ Ok(right_kv) => {
+ edge = right_kv.right_edge();
+ }
+ Err(_) => {
+ break;
+ }
+ }
+ }
+ }
+
+ res
+ }
+
+ self.length = dfs(self.root.as_ref());
+ }
+
+ /// Removes empty levels on the top.
+ fn fix_top(&mut self) {
+ loop {
+ {
+ let node = self.root.as_ref();
+ if node.height() == 0 || node.len() > 0 {
+ break;
+ }
+ }
+ self.root.pop_level();
+ }
+ }
+
+ fn fix_right_border(&mut self) {
+ self.fix_top();
+
+ {
+ let mut cur_node = self.root.as_mut();
+
+ while let Internal(node) = cur_node.force() {
+ let mut last_kv = node.last_kv();
+
+ if last_kv.can_merge() {
+ cur_node = last_kv.merge().descend();
+ } else {
+ let right_len = last_kv.reborrow().right_edge().descend().len();
+ // `MIN_LEN + 1` to avoid readjusting if a merge happens on the next level.
+ if right_len < node::MIN_LEN + 1 {
+ last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+ }
+ cur_node = last_kv.right_edge().descend();
+ }
+ }
+ }
+
+ self.fix_top();
+ }
+
+ /// The symmetric counterpart of `fix_right_border`.
+ fn fix_left_border(&mut self) {
+ self.fix_top();
+
+ {
+ let mut cur_node = self.root.as_mut();
+
+ while let Internal(node) = cur_node.force() {
+ let mut first_kv = node.first_kv();
+
+ if first_kv.can_merge() {
+ cur_node = first_kv.merge().descend();
+ } else {
+ let left_len = first_kv.reborrow().left_edge().descend().len();
+ if left_len < node::MIN_LEN + 1 {
+ first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+ }
+ cur_node = first_kv.left_edge().descend();
+ }
+ }
+ }
+
+ self.fix_top();
+ }
+
+ /// If the root node is the shared root node, allocate our own node.
+ fn ensure_root_is_owned(&mut self) -> Result<(), TryReserveError> {
+ if self.root.is_shared_root() {
+ self.root = node::Root::new_leaf()?;
+ }
+ Ok(())
+ }
+}
+
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ fn into_iter(self) -> Iter<'a, K, V> {
+ self.iter()
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_unchecked()) }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+}
+
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_back_unchecked()) }
+ }
+ }
+}
+
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+impl<K, V> Clone for Iter<'_, K, V> {
+ fn clone(&self) -> Self {
+ Iter {
+ range: self.range.clone(),
+ length: self.length,
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap<K, V> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ #[inline(always)]
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
+ }
+}
+
+impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_unchecked()) }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+}
+
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_back_unchecked()) }
+ }
+ }
+}
+
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+impl<K, V> IntoIterator for BTreeMap<K, V> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
+
+ fn into_iter(self) -> IntoIter<K, V> {
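+ // Read the root out of `self` twice, one copy for the front cursor and
+ // one for the back, then `mem::forget(self)` so the map's fields are
+ // never dropped here; `IntoIter` now owns the nodes and frees them as
+ // it is consumed or dropped.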
+ let root1 = unsafe { ptr::read(&self.root).into_ref() };
+ let root2 = unsafe { ptr::read(&self.root).into_ref() };
+ let len = self.length;
+ mem::forget(self);
+
+ IntoIter {
+ front: first_leaf_edge(root1),
+ back: last_leaf_edge(root2),
+ length: len,
+ }
+ }
+}
+
+impl<K, V> Drop for IntoIter<K, V> {
+ fn drop(&mut self) {
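+ // Drain any remaining elements so their keys and values are dropped,
+ // then walk up from the leftover leaf, freeing each now-empty node.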
+ self.for_each(drop);
+ unsafe {
+ let leaf_node = ptr::read(&self.front).into_node();
+ if leaf_node.is_shared_root() {
+ return;
+ }
+
+ if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
+ let mut cur_node = first_parent.into_node();
+ while let Some(parent) = cur_node.deallocate_and_ascend() {
+ cur_node = parent.into_node()
+ }
+ }
+ }
+ }
+}
+
+impl<K, V> Iterator for IntoIter<K, V> {
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ if self.length == 0 {
+ return None;
+ } else {
+ self.length -= 1;
+ }
+
+ let handle = unsafe { ptr::read(&self.front) };
+
+ let mut cur_handle = match handle.right_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.front = kv.right_edge();
+ return Some((k, v));
+ }
+ Err(last_edge) => unsafe {
+ unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
+ },
+ };
+
+ loop {
+ match cur_handle.right_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.front = first_leaf_edge(kv.right_edge().descend());
+ return Some((k, v));
+ }
+ Err(last_edge) => unsafe {
+ cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
+ },
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+}
+
+impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
+ fn next_back(&mut self) -> Option<(K, V)> {
+ if self.length == 0 {
+ return None;
+ } else {
+ self.length -= 1;
+ }
+
+ let handle = unsafe { ptr::read(&self.back) };
+
+ let mut cur_handle = match handle.left_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.back = kv.left_edge();
+ return Some((k, v));
+ }
+ Err(last_edge) => unsafe {
+ unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
+ },
+ };
+
+ loop {
+ match cur_handle.left_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.back = last_leaf_edge(kv.left_edge().descend());
+ return Some((k, v));
+ }
+ Err(last_edge) => unsafe {
+ cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
+ },
+ }
+ }
+ }
+}
+
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+impl<K, V> FusedIterator for IntoIter<K, V> {}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a K> {
+ self.inner.next().map(|(k, _)| k)
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a K> {
+ self.inner.next_back().map(|(k, _)| k)
+ }
+}
+
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+impl<K, V> Clone for Keys<'_, K, V> {
+ #[inline(always)]
+ fn clone(&self) -> Self {
+ Keys {
+ inner: self.inner.clone(),
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<K, V> FusedIterator for Values<'_, K, V> {}
+
+impl<K, V> Clone for Values<'_, K, V> {
+ #[inline(always)]
+ fn clone(&self) -> Self {
+ Values {
+ inner: self.inner.clone(),
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_unchecked()) }
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+ type Item = &'a mut V;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a mut V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a mut V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+impl<'a, K, V> Range<'a, K, V> {
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+ let handle = self.front;
+
+ let mut cur_handle = match handle.right_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.front = kv.right_edge();
+ return ret;
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
+
+ loop {
+ match cur_handle.right_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.front = first_leaf_edge(kv.right_edge().descend());
+ return ret;
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
+ }
+ }
+ }
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_back_unchecked()) }
+ }
+ }
+}
+
+impl<'a, K, V> Range<'a, K, V> {
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+ let handle = self.back;
+
+ let mut cur_handle = match handle.left_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.back = kv.left_edge();
+ return ret;
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
+
+ loop {
+ match cur_handle.left_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.back = last_leaf_edge(kv.left_edge().descend());
+ return ret;
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
+ }
+ }
+ }
+ }
+}
+
+impl<K, V> FusedIterator for Range<'_, K, V> {}
+
+impl<K, V> Clone for Range<'_, K, V> {
+ #[inline]
+ fn clone(&self) -> Self {
+ Range {
+ front: self.front,
+ back: self.back,
+ }
+ }
+}
+
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ #[inline]
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_unchecked()) }
+ }
+ }
+}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ let handle = ptr::read(&self.front);
+
+ let mut cur_handle = match handle.right_kv() {
+ Ok(kv) => {
+ self.front = ptr::read(&kv).right_edge();
+ // Doing the descend invalidates the references returned by `into_kv_mut`,
+ // so we have to do this last.
+ let (k, v) = kv.into_kv_mut();
+ return (k, v); // coerce k from `&mut K` to `&K`
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
+
+ loop {
+ match cur_handle.right_kv() {
+ Ok(kv) => {
+ self.front = first_leaf_edge(ptr::read(&kv).right_edge().descend());
+ // Doing the descend invalidates the references returned by `into_kv_mut`,
+ // so we have to do this last.
+ let (k, v) = kv.into_kv_mut();
+ return (k, v); // coerce k from `&mut K` to `&K`
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
+ }
+ }
+ }
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+ #[inline]
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_back_unchecked()) }
+ }
+ }
+}
+
+impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ let handle = ptr::read(&self.back);
+
+ let mut cur_handle = match handle.left_kv() {
+ Ok(kv) => {
+ self.back = ptr::read(&kv).left_edge();
+ // Doing the descend invalidates the references returned by `into_kv_mut`,
+ // so we have to do this last.
+ let (k, v) = kv.into_kv_mut();
+ return (k, v); // coerce k from `&mut K` to `&K`
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
+
+ loop {
+ match cur_handle.left_kv() {
+ Ok(kv) => {
+ self.back = last_leaf_edge(ptr::read(&kv).left_edge().descend());
+ // Doing the descend invalidates the references returned by `into_kv_mut`,
+ // so we have to do this last.
+ let (k, v) = kv.into_kv_mut();
+ return (k, v); // coerce k from `&mut K` to `&K`
+ }
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
+ }
+ }
+ }
+ }
+}
+
+impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
+ #[inline]
+ fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
+ let mut map = BTreeMap::new();
+ map.extend(iter);
+ map
+ }
+}
+
+impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
+ #[inline]
+ fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+ iter.into_iter().for_each(move |(k, v)| {
+ self.try_insert(k, v).expect("Out of Mem");
+ });
+ }
+}
+
+impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+ }
+}
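+
+// `Extend` and `FromIterator` cannot report allocation failure through
+// their signatures, so the impls above panic on OOM. A caller who needs
+// fallibility can drive the loop directly; a hedged sketch, where `pairs`
+// stands for any iterator of key-value tuples:
+//
+// ```
+// let mut map = BTreeMap::new();
+// for (k, v) in pairs {
+//     map.try_insert(k, v)?;
+// }
+// ```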
+
+impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ for elt in self {
+ elt.hash(state);
+ }
+ }
+}
+
+impl<K: Ord, V> Default for BTreeMap<K, V> {
+ /// Creates an empty `BTreeMap<K, V>`.
+ #[inline(always)]
+ fn default() -> BTreeMap<K, V> {
+ BTreeMap::new()
+ }
+}
+
+impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
+ fn eq(&self, other: &BTreeMap<K, V>) -> bool {
+ self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
+ }
+}
+
+impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
+
+impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
+ #[inline]
+ fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
+ #[inline]
+ fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
+ }
+}
+
+impl<K: Ord, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
+where
+ K: Borrow<Q>,
+ Q: Ord,
+{
+ type Output = V;
+
+ /// Returns a reference to the value corresponding to the supplied key.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the key is not present in the `BTreeMap`.
+ #[inline]
+ fn index(&self, key: &Q) -> &V {
+ self.get(key).expect("no entry found for key")
+ }
+}
+
+fn first_leaf_edge<BorrowType, K, V>(
+ mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.first_edge(),
+ Internal(internal) => {
+ node = internal.first_edge().descend();
+ }
+ }
+ }
+}
+
+fn last_leaf_edge<BorrowType, K, V>(
+ mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.last_edge(),
+ Internal(internal) => {
+ node = internal.last_edge().descend();
+ }
+ }
+ }
+}
+
+fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
+ root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ range: R,
+) -> (
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+)
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ match (range.start_bound(), range.end_bound()) {
+ (Excluded(s), Excluded(e)) if s == e => {
+ panic!("range start and end are equal and excluded in BTreeMap")
+ }
+ (Included(s), Included(e))
+ | (Included(s), Excluded(e))
+ | (Excluded(s), Included(e))
+ | (Excluded(s), Excluded(e))
+ if s > e =>
+ {
+ panic!("range start is greater than range end in BTreeMap")
+ }
+ _ => {}
+ };
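+ // For example `4..2` and `(Excluded(&3), Excluded(&3))` hit the panics
+ // above, while `3..3` passes the checks and simply yields an empty range.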
+
+ let mut min_node = root1;
+ let mut max_node = root2;
+ let mut min_found = false;
+ let mut max_found = false;
+ let mut diverged = false;
+
+ loop {
+ let min_edge = match (min_found, range.start_bound()) {
+ (false, Included(key)) => match search::search_linear(&min_node, key) {
+ (i, true) => {
+ min_found = true;
+ i
+ }
+ (i, false) => i,
+ },
+ (false, Excluded(key)) => match search::search_linear(&min_node, key) {
+ (i, true) => {
+ min_found = true;
+ i + 1
+ }
+ (i, false) => i,
+ },
+ (_, Unbounded) => 0,
+ (true, Included(_)) => min_node.keys().len(),
+ (true, Excluded(_)) => 0,
+ };
+
+ let max_edge = match (max_found, range.end_bound()) {
+ (false, Included(key)) => match search::search_linear(&max_node, key) {
+ (i, true) => {
+ max_found = true;
+ i + 1
+ }
+ (i, false) => i,
+ },
+ (false, Excluded(key)) => match search::search_linear(&max_node, key) {
+ (i, true) => {
+ max_found = true;
+ i
+ }
+ (i, false) => i,
+ },
+ (_, Unbounded) => max_node.keys().len(),
+ (true, Included(_)) => 0,
+ (true, Excluded(_)) => max_node.keys().len(),
+ };
+
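+ // Until the two cursors diverge they sit in the same node, so an
+ // inverted pair of edge indices can only mean the key type's `Ord`
+ // is inconsistent.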
+ if !diverged {
+ if max_edge < min_edge {
+ panic!("Ord is ill-defined in BTreeMap range")
+ }
+ if min_edge != max_edge {
+ diverged = true;
+ }
+ }
+
+ let front = Handle::new_edge(min_node, min_edge);
+ let back = Handle::new_edge(max_node, max_edge);
+ match (front.force(), back.force()) {
+ (Leaf(f), Leaf(b)) => {
+ return (f, b);
+ }
+ (Internal(min_int), Internal(max_int)) => {
+ min_node = min_int.descend();
+ max_node = max_int.descend();
+ }
+ _ => unreachable!("BTreeMap has different depths"),
+ };
+ }
+}
+
+#[inline(always)]
+unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
+ val.unwrap_or_else(|| {
+ if cfg!(debug_assertions) {
+ panic!("'unchecked' unwrap on None in BTreeMap");
+ } else {
+ intrinsics::unreachable();
+ }
+ })
+}
+
+impl<K, V> BTreeMap<K, V> {
+ /// Gets an iterator over the entries of the map, sorted by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(3, "c");
+ /// map.insert(2, "b");
+ /// map.insert(1, "a");
+ ///
+ /// for (key, value) in map.iter() {
+ /// println!("{}: {}", key, value);
+ /// }
+ ///
+ /// let (first_key, first_value) = map.iter().next().unwrap();
+ /// assert_eq!((*first_key, *first_value), (1, "a"));
+ /// ```
+
+ pub fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ range: Range {
+ front: first_leaf_edge(self.root.as_ref()),
+ back: last_leaf_edge(self.root.as_ref()),
+ },
+ length: self.length,
+ }
+ }
+
+ /// Gets a mutable iterator over the entries of the map, sorted by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert("a", 1);
+ /// map.insert("b", 2);
+ /// map.insert("c", 3);
+ ///
+ /// // add 10 to the value if the key isn't "a"
+ /// for (key, value) in map.iter_mut() {
+ /// if key != &"a" {
+ /// *value += 10;
+ /// }
+ /// }
+ /// ```
+
+ pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
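+ // Duplicate the mutable root reference so the iterator can hold both a
+ // front and a back cursor into the same tree; `length` bounds iteration
+ // so each entry is handed out at most once.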
+ let root1 = self.root.as_mut();
+ let root2 = unsafe { ptr::read(&root1) };
+ IterMut {
+ range: RangeMut {
+ front: first_leaf_edge(root1),
+ back: last_leaf_edge(root2),
+ _marker: PhantomData,
+ },
+ length: self.length,
+ }
+ }
+
+ /// Gets an iterator over the keys of the map, in sorted order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(2, "b");
+ /// a.insert(1, "a");
+ ///
+ /// let keys: Vec<_> = a.keys().cloned().collect();
+ /// assert_eq!(keys, [1, 2]);
+ /// ```
+
+ #[inline(always)]
+ pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
+ Keys { inner: self.iter() }
+ }
+
+ /// Gets an iterator over the values of the map, in order by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "hello");
+ /// a.insert(2, "goodbye");
+ ///
+ /// let values: Vec<&str> = a.values().cloned().collect();
+ /// assert_eq!(values, ["hello", "goodbye"]);
+ /// ```
+
+ #[inline(always)]
+ pub fn values<'a>(&'a self) -> Values<'a, K, V> {
+ Values { inner: self.iter() }
+ }
+
+ /// Gets a mutable iterator over the values of the map, in order by key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, String::from("hello"));
+ /// a.insert(2, String::from("goodbye"));
+ ///
+ /// for value in a.values_mut() {
+ /// value.push_str("!");
+ /// }
+ ///
+ /// let values: Vec<String> = a.values().cloned().collect();
+ /// assert_eq!(values, [String::from("hello!"),
+ /// String::from("goodbye!")]);
+ /// ```
+
+ #[inline(always)]
+ pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
+ ValuesMut {
+ inner: self.iter_mut(),
+ }
+ }
+
+ /// Returns the number of elements in the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// assert_eq!(a.len(), 0);
+ /// a.insert(1, "a");
+ /// assert_eq!(a.len(), 1);
+ /// ```
+
+ #[inline(always)]
+ pub fn len(&self) -> usize {
+ self.length
+ }
+
+ /// Returns `true` if the map contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// assert!(a.is_empty());
+ /// a.insert(1, "a");
+ /// assert!(!a.is_empty());
+ /// ```
+
+ #[inline(always)]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl<'a, K: Ord, V> Entry<'a, K, V> {
+ /// Ensures a value is in the entry by inserting the default if empty, and returns
+ /// a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// ```
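+ ///
+ /// The method documented here is fallible; a hedged sketch (`ignore`d
+ /// because it assumes this crate's `BTreeMap` is in scope):
+ ///
+ /// ```ignore
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// *map.try_entry("poneyland")?.or_try_insert(12)? += 10;
+ /// assert_eq!(map["poneyland"], 22);
+ /// ```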
+
+ pub fn or_try_insert(self, default: V) -> Result<&'a mut V, TryReserveError> {
+ match self {
+ Occupied(entry) => Ok(entry.into_mut()),
+ Vacant(entry) => entry.try_insert(default),
+ }
+ }
+
+ /// Ensures a value is in the entry by inserting the result of the default function if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
+ /// let s = "hoho".to_string();
+ ///
+ /// map.entry("poneyland").or_insert_with(|| s);
+ ///
+ /// assert_eq!(map["poneyland"], "hoho".to_string());
+ /// ```
+
+ pub fn or_try_insert_with<F: FnOnce() -> V>(
+ self,
+ default: F,
+ ) -> Result<&'a mut V, TryReserveError> {
+ match self {
+ Occupied(entry) => Ok(entry.into_mut()),
+ Vacant(entry) => entry.try_insert(default()),
+ }
+ }
+
+ /// Returns a reference to this entry's key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+
+ #[inline]
+ pub fn key(&self) -> &K {
+ match *self {
+ Occupied(ref entry) => entry.key(),
+ Vacant(ref entry) => entry.key(),
+ }
+ }
+
+ /// Provides in-place mutable access to an occupied entry before any
+ /// potential inserts into the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_insert(42);
+ /// assert_eq!(map["poneyland"], 42);
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_insert(42);
+ /// assert_eq!(map["poneyland"], 43);
+ /// ```
+
+ pub fn and_modify<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match self {
+ Occupied(mut entry) => {
+ f(entry.get_mut());
+ Occupied(entry)
+ }
+ Vacant(entry) => Vacant(entry),
+ }
+ }
+}
+
+impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
+ /// Ensures a value is in the entry by inserting the default value if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # fn main() {
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
+ /// map.entry("poneyland").or_default();
+ ///
+ /// assert_eq!(map["poneyland"], None);
+ /// # }
+ /// ```
+ pub fn or_default(self) -> Result<&'a mut V, TryReserveError> {
+ match self {
+ Occupied(entry) => Ok(entry.into_mut()),
+ Vacant(entry) => entry.try_insert(Default::default()),
+ }
+ }
+}
+
+impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
+ /// Gets a reference to the key that would be used when inserting a value
+ /// through the VacantEntry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+
+ #[inline(always)]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ /// Take ownership of the key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// if let Entry::Vacant(v) = map.entry("poneyland") {
+ /// v.into_key();
+ /// }
+ /// ```
+ #[inline(always)]
+ pub fn into_key(self) -> K {
+ self.key
+ }
+
+ /// Sets the value of the entry with the `VacantEntry`'s key,
+ /// and returns a mutable reference to it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// // count the number of occurrences of letters in the vec
+ /// for x in vec!["a","b","a","c","a","b"] {
+ /// *count.entry(x).or_insert(0) += 1;
+ /// }
+ ///
+ /// assert_eq!(count["a"], 3);
+ /// ```
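+ ///
+ /// A hedged sketch of this fallible method (`ignore`d because it assumes
+ /// this crate's `BTreeMap` and `Entry` are in scope):
+ ///
+ /// ```ignore
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// if let Entry::Vacant(v) = map.try_entry("poneyland")? {
+ ///     assert_eq!(*v.try_insert(37)?, 37);
+ /// }
+ /// ```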
+
+ pub fn try_insert(self, value: V) -> Result<&'a mut V, TryReserveError> {
+ *self.length += 1;
+
+ let out_ptr;
+
+ let mut ins_k;
+ let mut ins_v;
+ let mut ins_edge;
+
+ let mut cur_parent = match self.handle.insert(self.key, value)? {
+ (Fit(handle), _) => return Ok(handle.into_kv_mut().1),
+ (Split(left, k, v, right), ptr) => {
+ ins_k = k;
+ ins_v = v;
+ ins_edge = right;
+ out_ptr = ptr;
+ left.ascend().map_err(|n| n.into_root_mut())
+ }
+ };
+
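+ // Bubble the split upwards: keep inserting the separated key/value and
+ // the new right subtree into the parent until some node has room, or
+ // grow a new root level if none does.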
+ loop {
+ match cur_parent {
+ Ok(parent) => match parent.insert(ins_k, ins_v, ins_edge)? {
+ Fit(_) => return Ok(unsafe { &mut *out_ptr }),
+ Split(left, k, v, right) => {
+ ins_k = k;
+ ins_v = v;
+ ins_edge = right;
+ cur_parent = left.ascend().map_err(|n| n.into_root_mut());
+ }
+ },
+ Err(root) => {
+ root.push_level()?.push(ins_k, ins_v, ins_edge);
+ return Ok(unsafe { &mut *out_ptr });
+ }
+ }
+ }
+ }
+}
+
+impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
+ /// Gets a reference to the key in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+
+ #[inline]
+ pub fn key(&self) -> &K {
+ self.handle.reborrow().into_kv().0
+ }
+
+ /// Take ownership of the key and value from the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// // We delete the entry from the map.
+ /// o.remove_entry();
+ /// }
+ ///
+ /// // If we now try to get the value, it will panic:
+ /// // println!("{}", map["poneyland"]);
+ /// ```
+
+ #[inline]
+ pub fn remove_entry(self) -> (K, V) {
+ self.remove_kv()
+ }
+
+ /// Gets a reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.get(), &12);
+ /// }
+ /// ```
+
+ #[inline]
+ pub fn get(&self) -> &V {
+ self.handle.reborrow().into_kv().1
+ }
+
+ /// Gets a mutable reference to the value in the entry.
+ ///
+ /// If you need a reference to the `OccupiedEntry` that may outlive the
+ /// destruction of the `Entry` value, see [`into_mut`].
+ ///
+ /// [`into_mut`]: #method.into_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// *o.get_mut() += 10;
+ /// assert_eq!(*o.get(), 22);
+ ///
+ /// // We can use the same Entry multiple times.
+ /// *o.get_mut() += 2;
+ /// }
+ /// assert_eq!(map["poneyland"], 24);
+ /// ```
+ #[inline]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.handle.kv_mut().1
+ }
+
+ /// Converts the entry into a mutable reference to its value.
+ ///
+ /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+ ///
+ /// [`get_mut`]: #method.get_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// *o.into_mut() += 10;
+ /// }
+ /// assert_eq!(map["poneyland"], 22);
+ /// ```
+ #[inline]
+ pub fn into_mut(self) -> &'a mut V {
+ self.handle.into_kv_mut().1
+ }
+
+ /// Sets the value of the entry with the `OccupiedEntry`'s key,
+ /// and returns the entry's old value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// assert_eq!(o.insert(15), 12);
+ /// }
+ /// assert_eq!(map["poneyland"], 15);
+ /// ```
+ #[inline]
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Takes the value of the entry out of the map, and returns it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.remove(), 12);
+ /// }
+ /// // If we try to get "poneyland"'s value, it'll panic:
+ /// // println!("{}", map["poneyland"]);
+ /// ```
+ #[inline]
+ pub fn remove(self) -> V {
+ self.remove_kv().1
+ }
+
+ fn remove_kv(self) -> (K, V) {
+ *self.length -= 1;
+
+ let (small_leaf, old_key, old_val) = match self.handle.force() {
+ Leaf(leaf) => {
+ let (hole, old_key, old_val) = leaf.remove();
+ (hole.into_node(), old_key, old_val)
+ }
+ Internal(mut internal) => {
+ let key_loc = internal.kv_mut().0 as *mut K;
+ let val_loc = internal.kv_mut().1 as *mut V;
+
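+ // An internal entry cannot be removed in place: swap it with its
+ // in-order successor (the leftmost entry of the right subtree) and
+ // remove that entry from its leaf instead.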
+ let to_remove = first_leaf_edge(internal.right_edge().descend())
+ .right_kv()
+ .ok();
+ let to_remove = unsafe { unwrap_unchecked(to_remove) };
+
+ let (hole, key, val) = to_remove.remove();
+
+ let old_key = unsafe { mem::replace(&mut *key_loc, key) };
+ let old_val = unsafe { mem::replace(&mut *val_loc, val) };
+
+ (hole.into_node(), old_key, old_val)
+ }
+ };
+
+ // Handle underflow
+ let mut cur_node = small_leaf.forget_type();
+ while cur_node.len() < node::CAPACITY / 2 {
+ match handle_underfull_node(cur_node) {
+ AtRoot => break,
+ EmptyParent(_) => unreachable!(),
+ Merged(parent) => {
+ if parent.len() == 0 {
+ // We must be at the root
+ parent.into_root_mut().pop_level();
+ break;
+ } else {
+ cur_node = parent.forget_type();
+ }
+ }
+ Stole(_) => break,
+ }
+ }
+
+ (old_key, old_val)
+ }
+}
+
+enum UnderflowResult<'a, K, V> {
+ AtRoot,
+ EmptyParent(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+ Merged(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+ Stole(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+}
+
+fn handle_underfull_node<'a, K, V>(
+ node: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+) -> UnderflowResult<'a, K, V> {
+ let parent = if let Ok(parent) = node.ascend() {
+ parent
+ } else {
+ return AtRoot;
+ };
+
+ let (is_left, mut handle) = match parent.left_kv() {
+ Ok(left) => (true, left),
+ Err(parent) => match parent.right_kv() {
+ Ok(right) => (false, right),
+ Err(parent) => {
+ return EmptyParent(parent.into_node());
+ }
+ },
+ };
+
+ if handle.can_merge() {
+ Merged(handle.merge().into_node())
+ } else {
+ if is_left {
+ handle.steal_left();
+ } else {
+ handle.steal_right();
+ }
+ Stole(handle.into_node())
+ }
+}
+
+impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ let res = match (self.left.peek(), self.right.peek()) {
+ (Some(&(ref left_key, _)), Some(&(ref right_key, _))) => left_key.cmp(right_key),
+ (Some(_), None) => Ordering::Less,
+ (None, Some(_)) => Ordering::Greater,
+ (None, None) => return None,
+ };
+
+ // Check which element comes first and only advance the corresponding iterator.
+ // If two keys are equal, take the value from `right`.
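+ // For example, merging `left = [(1, "a"), (3, "c")]` with
+ // `right = [(1, "x")]` yields `(1, "x")` and then `(3, "c")`.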
+ match res {
+ Ordering::Less => self.left.next(),
+ Ordering::Greater => self.right.next(),
+ Ordering::Equal => {
+ self.left.next();
+ self.right.next()
+ }
+ }
+ }
+}
diff --git a/third_party/rust/fallible_collections/src/btree/node.rs b/third_party/rust/fallible_collections/src/btree/node.rs
new file mode 100644
index 0000000000..249aeb6598
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/btree/node.rs
@@ -0,0 +1,1676 @@
+// This is an attempt at an implementation following the ideal
+//
+// ```
+// struct BTreeMap<K, V> {
+// height: usize,
+// root: Option<Box<Node<K, V, height>>>
+// }
+//
+// struct Node<K, V, height: usize> {
+// keys: [K; 2 * B - 1],
+// vals: [V; 2 * B - 1],
+// edges: if height > 0 {
+// [Box<Node<K, V, height - 1>>; 2 * B]
+// } else { () },
+// parent: *const Node<K, V, height + 1>,
+// parent_idx: u16,
+// len: u16,
+// }
+// ```
+//
+// Since Rust doesn't actually have dependent types and polymorphic recursion,
+// we make do with lots of unsafety.
+
+// A major goal of this module is to avoid complexity by treating the tree as a generic (if
+// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
+// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
+// even what underfull means. However, we do rely on a few invariants:
+//
+// - Trees must have uniform depth/height. This means that every path down to a leaf from a
+// given node has exactly the same length.
+// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
+// This implies that even an empty internal node has at least one edge.
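+//
+// For example, an internal node with `len == 3` stores exactly 3 initialized
+// keys, 3 initialized values, and 4 initialized edges, and all 4 subtrees
+// below it have the same height.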
+
+use core::marker::PhantomData;
+use core::mem::{self, MaybeUninit};
+use core::ptr::{self, NonNull, Unique};
+use core::slice;
+
+use crate::boxed::FallibleBox;
+use crate::TryReserveError;
+use alloc::alloc::{Allocator, Global, Layout};
+use alloc::boxed::Box;
+
+const B: usize = 6;
+pub const MIN_LEN: usize = B - 1;
+pub const CAPACITY: usize = 2 * B - 1;
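+// With B = 6, MIN_LEN = 5 and CAPACITY = 11: a full node stores 11 key-value
+// pairs, and a full internal node additionally stores 2 * B = 12 edges.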
+
+/// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
+/// these, since only the first `len` keys and values are assumed to be initialized. As such,
+/// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
+/// case.
+///
+/// We have a separate type for the header and rely on it matching the prefix of `LeafNode`, in
+/// order to statically allocate a single dummy node to avoid allocations. This struct is
+/// `repr(C)` to prevent its fields from being reordered. `LeafNode` does not just contain a
+/// `NodeHeader` because we do not want unnecessary padding between `len` and the keys.
+/// Crucially, `NodeHeader` can be safely transmuted to different K and V. (This is exploited
+/// by `as_header`.)
+/// See `into_key_slice` for an explanation of K2. K2 cannot be safely transmuted around
+/// because the size of `NodeHeader` depends on its alignment!
+#[repr(C)]
+struct NodeHeader<K, V, K2 = ()> {
+ /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
+ /// This either points to an actual node or is null.
+ parent: *const InternalNode<K, V>,
+
+ /// This node's index into the parent node's `edges` array.
+ /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+ /// This is only guaranteed to be initialized when `parent` is non-null.
+ parent_idx: MaybeUninit<u16>,
+
+ /// The number of keys and values this node stores.
+ ///
+ /// This is next to `parent_idx` to encourage the compiler to join `len` and
+ /// `parent_idx` into the same 32-bit word, reducing space overhead.
+ len: u16,
+
+ /// See `into_key_slice`.
+ keys_start: [K2; 0],
+}
+#[repr(C)]
+struct LeafNode<K, V> {
+ /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
+ /// This either points to an actual node or is null.
+ parent: *const InternalNode<K, V>,
+
+ /// This node's index into the parent node's `edges` array.
+ /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+ /// This is only guaranteed to be initialized when `parent` is non-null.
+ parent_idx: MaybeUninit<u16>,
+
+ /// The number of keys and values this node stores.
+ ///
+ /// This is next to `parent_idx` to encourage the compiler to join `len` and
+ /// `parent_idx` into the same 32-bit word, reducing space overhead.
+ len: u16,
+
+ /// The arrays storing the actual data of the node. Only the first `len` elements of each
+ /// array are initialized and valid.
+ keys: [MaybeUninit<K>; CAPACITY],
+ vals: [MaybeUninit<V>; CAPACITY],
+}
+
+impl<K, V> LeafNode<K, V> {
+ /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
+ /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
+ unsafe fn new() -> Self {
+ LeafNode {
+ // As a general policy, we leave fields uninitialized if they can be, as this should
+ // be both slightly faster and easier to track in Valgrind.
+ keys: MaybeUninit::uninit_array::<CAPACITY>(),
+ vals: MaybeUninit::uninit_array::<CAPACITY>(),
+ parent: ptr::null(),
+ parent_idx: MaybeUninit::uninit(),
+ len: 0,
+ }
+ }
+}
+
+impl<K, V> NodeHeader<K, V> {
+ fn is_shared_root(&self) -> bool {
+ ptr::eq(self, &EMPTY_ROOT_NODE as *const _ as *const _)
+ }
+}
+
+// We need to implement Sync here in order to make a static instance.
+unsafe impl Sync for NodeHeader<(), ()> {}
+
+// An empty node used as a placeholder for the root node, to avoid allocations.
+// We use just a header in order to save space, since no operation on an empty tree will
+// ever take a pointer past the first key.
+static EMPTY_ROOT_NODE: NodeHeader<(), ()> = NodeHeader {
+ parent: ptr::null(),
+ parent_idx: MaybeUninit::uninit(),
+ len: 0,
+ keys_start: [],
+};
+
+/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
+/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
+/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
+/// node, allowing code to act on leaf and internal nodes generically without having to even check
+/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
+#[repr(C)]
+struct InternalNode<K, V> {
+ data: LeafNode<K, V>,
+
+ /// The pointers to the children of this node. `len + 1` of these are considered
+ /// initialized and valid.
+ edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
+}
+
+impl<K, V> InternalNode<K, V> {
+ /// Creates a new `InternalNode`.
+ ///
+ /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
+ /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
+ /// edges are initialized and valid, meaning that even when the node is empty (having a
+ /// `len` of 0), there must be one initialized and valid edge. This function does not set up
+ /// such an edge.
+ unsafe fn new() -> Self {
+ InternalNode {
+ data: LeafNode::new(),
+ edges: MaybeUninit::uninit_array::<{ 2 * B }>(),
+ }
+ }
+}
+
+/// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
+/// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
+/// of nodes is actually behind the box, and, partially due to this lack of information, has no
+/// destructor.
+struct BoxedNode<K, V> {
+ ptr: Unique<LeafNode<K, V>>,
+}
+
+impl<K, V> BoxedNode<K, V> {
+ fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
+ let (ptr, _g) = Box::into_unique(node);
+ BoxedNode { ptr }
+ }
+
+ fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
+ unsafe {
+ BoxedNode {
+ ptr: Unique::new_unchecked(Box::into_raw(node) as *mut LeafNode<K, V>),
+ }
+ }
+ }
+
+ unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
+ BoxedNode {
+ ptr: Unique::new_unchecked(ptr.as_ptr()),
+ }
+ }
+
+ fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
+ NonNull::from(self.ptr)
+ }
+}
+
+/// An owned tree. Note that despite being owned, this does not have a destructor,
+/// and must be cleaned up manually.
+pub struct Root<K, V> {
+ node: BoxedNode<K, V>,
+ height: usize,
+}
+
+unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
+unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
+
+impl<K, V> Root<K, V> {
+ pub fn is_shared_root(&self) -> bool {
+ self.as_ref().is_shared_root()
+ }
+
+ pub fn shared_empty_root() -> Self {
+ Root {
+ node: unsafe {
+ BoxedNode::from_ptr(NonNull::new_unchecked(
+ &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _,
+ ))
+ },
+ height: 0,
+ }
+ }
+
+ pub fn new_leaf() -> Result<Self, TryReserveError> {
+ Ok(Root {
+ node: BoxedNode::from_leaf(<Box<_> as FallibleBox<_>>::try_new(unsafe {
+ LeafNode::new()
+ })?),
+ height: 0,
+ })
+ }
+
+ pub fn as_ref(&self) -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *const _ as *mut _,
+ _marker: PhantomData,
+ }
+ }
+
+ pub fn as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *mut _,
+ _marker: PhantomData,
+ }
+ }
+
+ pub fn into_ref(self) -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: ptr::null_mut(), // FIXME: Is there anything better to do here?
+ _marker: PhantomData,
+ }
+ }
+
+ /// Adds a new internal node with a single edge, pointing to the previous root, and makes that
+ /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
+ pub fn push_level(
+ &mut self,
+ ) -> Result<NodeRef<marker::Mut<'_>, K, V, marker::Internal>, TryReserveError> {
+ debug_assert!(!self.is_shared_root());
+ let mut new_node = <Box<_> as FallibleBox<_>>::try_new(unsafe { InternalNode::new() })?;
+ new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
+
+ self.node = BoxedNode::from_internal(new_node);
+ self.height += 1;
+
+ let mut ret = NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *mut _,
+ _marker: PhantomData,
+ };
+
+ unsafe {
+ ret.reborrow_mut().first_edge().correct_parent_link();
+ }
+
+ Ok(ret)
+ }
+
+ /// Removes the root node, using its first child as the new root. This cannot be called when
+ /// the tree consists only of a leaf node. As it is intended only to be called when the root
+ /// has only one edge, no cleanup is done on any of the other children or elements of the root.
+ /// This decreases the height by 1 and is the opposite of `push_level`.
+ pub fn pop_level(&mut self) {
+ debug_assert!(self.height > 0);
+
+ let top = self.node.ptr;
+
+ self.node = unsafe {
+ BoxedNode::from_ptr(
+ self.as_mut()
+ .cast_unchecked::<marker::Internal>()
+ .first_edge()
+ .descend()
+ .node,
+ )
+ };
+ self.height -= 1;
+ unsafe {
+ (*self.as_mut().as_leaf_mut()).parent = ptr::null();
+ }
+
+ unsafe {
+ Global.deallocate(
+ NonNull::from(top).cast(),
+ Layout::new::<InternalNode<K, V>>(),
+ );
+ }
+ }
+}
+
+// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
+// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
+// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
+// However, whenever a public type wraps `NodeRef`, make sure that it has the
+// correct variance.
+/// A reference to a node.
+///
+/// This type has a number of parameters that controls how it acts:
+/// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
+/// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
+/// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+/// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
+/// - `K` and `V`: These control what types of things are stored in the nodes.
+/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
+/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
+/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
+/// `NodeRef` could be pointing to either type of node.
+/// Note that in case of a leaf node, this might still be the shared root! Only turn
+/// this into a `LeafNode` reference if you know it is not a root! Shared references
+/// must be dereferenceable *for the entire size of their pointee*, so `&InternalNode`
+/// pointing to the shared root is UB.
+/// Turning this into a `NodeHeader` is always safe.
+pub struct NodeRef<BorrowType, K, V, Type> {
+ height: usize,
+ node: NonNull<LeafNode<K, V>>,
+ // This is null unless the borrow type is `Mut`
+ root: *const Root<K, V>,
+ _marker: PhantomData<(BorrowType, Type)>,
+}
+
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
+impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
+
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ fn as_internal(&self) -> &InternalNode<K, V> {
+ unsafe { &*(self.node.as_ptr() as *mut InternalNode<K, V>) }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
+ unsafe { &mut *(self.node.as_ptr() as *mut InternalNode<K, V>) }
+ }
+}
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Finds the length of the node. This is the number of keys or values. In an
+ /// internal node, the number of edges is `len() + 1`.
+ pub fn len(&self) -> usize {
+ self.as_header().len as usize
+ }
+
+ /// Returns the height of this node in the whole tree. Zero height denotes the
+ /// leaf level.
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
+ /// Removes any static information about whether this node is a `Leaf` or an
+ /// `Internal` node.
+ pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ }
+ }
+
+ /// Temporarily takes out another, immutable reference to the same node.
+ fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ }
+ }
+
+ /// Assert that this is indeed a proper leaf node, and not the shared root.
+ unsafe fn as_leaf(&self) -> &LeafNode<K, V> {
+ self.node.as_ref()
+ }
+
+ fn as_header(&self) -> &NodeHeader<K, V> {
+ unsafe { &*(self.node.as_ptr() as *const NodeHeader<K, V>) }
+ }
+
+ pub fn is_shared_root(&self) -> bool {
+ self.as_header().is_shared_root()
+ }
+
+ pub fn keys(&self) -> &[K] {
+ self.reborrow().into_key_slice()
+ }
+
+ fn vals(&self) -> &[V] {
+ self.reborrow().into_val_slice()
+ }
+
+ /// Finds the parent of the current node. Returns `Ok(handle)` if the current
+ /// node actually has a parent, where `handle` points to the edge of the parent
+ /// that points to the current node. Returns `Err(self)` if the current node has
+ /// no parent, giving back the original `NodeRef`.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn ascend(
+ self,
+ ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
+ let parent_as_leaf = self.as_header().parent as *const LeafNode<K, V>;
+ if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
+ Ok(Handle {
+ node: NodeRef {
+ height: self.height + 1,
+ node: non_zero,
+ root: self.root,
+ _marker: PhantomData,
+ },
+ idx: unsafe { usize::from(*self.as_header().parent_idx.as_ptr()) },
+ _marker: PhantomData,
+ })
+ } else {
+ Err(self)
+ }
+ }
+
+ pub fn first_edge(self) -> Handle<Self, marker::Edge> {
+ Handle::new_edge(self, 0)
+ }
+
+ pub fn last_edge(self) -> Handle<Self, marker::Edge> {
+ let len = self.len();
+ Handle::new_edge(self, len)
+ }
+
+ /// Note that `self` must be nonempty.
+ pub fn first_kv(self) -> Handle<Self, marker::KV> {
+ debug_assert!(self.len() > 0);
+ Handle::new_kv(self, 0)
+ }
+
+ /// Note that `self` must be nonempty.
+ pub fn last_kv(self) -> Handle<Self, marker::KV> {
+ let len = self.len();
+ debug_assert!(len > 0);
+ Handle::new_kv(self, len - 1)
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
+ pub unsafe fn deallocate_and_ascend(
+ self,
+ ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
+ debug_assert!(!self.is_shared_root());
+ let node = self.node;
+ let ret = self.ascend().ok();
+ Global.deallocate(node.cast(), Layout::new::<LeafNode<K, V>>());
+ ret
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
+ pub unsafe fn deallocate_and_ascend(
+ self,
+ ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
+ let node = self.node;
+ let ret = self.ascend().ok();
+ Global.deallocate(node.cast(), Layout::new::<InternalNode<K, V>>());
+ ret
+ }
+}
+
+impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Unsafely asserts to the compiler some static information about whether this
+ /// node is a `Leaf`.
+ unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ }
+ }
+
+ /// Temporarily takes out another, mutable reference to the same node. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+ unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ }
+ }
+
+ /// Returns a raw ptr to avoid asserting exclusive access to the entire node.
+ fn as_leaf_mut(&mut self) -> *mut LeafNode<K, V> {
+ // We are mutable, so we cannot be the root, so accessing this as a leaf is okay.
+ self.node.as_ptr()
+ }
+
+ fn keys_mut(&mut self) -> &mut [K] {
+ unsafe { self.reborrow_mut().into_key_slice_mut() }
+ }
+
+ fn vals_mut(&mut self) -> &mut [V] {
+ unsafe { self.reborrow_mut().into_val_slice_mut() }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn into_key_slice(self) -> &'a [K] {
+ // We have to be careful here because we might be pointing to the shared root.
+ // In that case, we must not create an `&LeafNode`. We could just return
+ // an empty slice whenever the length is 0 (this includes the shared root),
+ // but we want to avoid that run-time check.
+ // Instead, we create a slice pointing into the node whenever possible.
+ // We can sometimes do this even for the shared root, as the slice will be
+ // empty. We cannot *always* do this because if the type is too highly
+ // aligned, the offset of `keys` in a "full node" might be outside the bounds
+ // of the header! So we do an alignment check first, that will be
+ // evaluated at compile-time, and only do any run-time check in the rare case
+ // that the alignment is very big.
+ if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
+ &[]
+ } else {
+ // Thanks to the alignment check above, we know that `keys` will be
+ // in-bounds of some allocation even if this is the shared root!
+ // (We might be one-past-the-end, but that is allowed by LLVM.)
+ // Getting the pointer is tricky though. `NodeHeader` does not have a `keys`
+ // field because we want its size to not depend on the alignment of `K`
+ // (needed because `as_header` should be safe). We cannot call `as_leaf`
+ // because we might be the shared root.
+ // For this reason, `NodeHeader` has this `K2` parameter (that's usually `()`
+ // and hence just adds a size-0-align-1 field, not affecting layout).
+ // We know that we can transmute `NodeHeader<K, V, ()>` to `NodeHeader<K, V, K>`
+ // because we did the alignment check above, and hence `NodeHeader<K, V, K>`
+ // is not bigger than `NodeHeader<K, V, ()>`! Then we can use `NodeHeader<K, V, K>`
+ // to compute the pointer where the keys start.
+ // This entire hack will become unnecessary once
+ // <https://github.com/rust-lang/rfcs/pull/2582> lands, then we can just take a raw
+ // pointer to the `keys` field of `*const InternalNode<K, V>`.
+
+ // This is a non-debug-assert because it can be completely compile-time evaluated.
+ assert!(mem::size_of::<NodeHeader<K, V>>() == mem::size_of::<NodeHeader<K, V, K>>());
+ let header = self.as_header() as *const _ as *const NodeHeader<K, V, K>;
+ let keys = unsafe { &(*header).keys_start as *const _ as *const K };
+ unsafe { slice::from_raw_parts(keys, self.len()) }
+ }
+ }
+
+ fn into_val_slice(self) -> &'a [V] {
+ debug_assert!(!self.is_shared_root());
+ // We cannot be the root, so `as_leaf` is okay
+ unsafe {
+ slice::from_raw_parts(MaybeUninit::slice_as_ptr(&self.as_leaf().vals), self.len())
+ }
+ }
+
+ fn into_slices(self) -> (&'a [K], &'a [V]) {
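+ // An immutable `NodeRef` can safely be duplicated bitwise, so `ptr::read`
+ // yields a second handle: the copy drives `into_key_slice` while `self`
+ // drives `into_val_slice`, since each conversion consumes its receiver.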
+ let k = unsafe { ptr::read(&self) };
+ (k.into_key_slice(), self.into_val_slice())
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Gets a mutable reference to the root itself. This is useful primarily when the
+ /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
+ pub fn into_root_mut(self) -> &'a mut Root<K, V> {
+ unsafe { &mut *(self.root as *mut Root<K, V>) }
+ }
+
+ fn into_key_slice_mut(mut self) -> &'a mut [K] {
+ // Same as for `into_key_slice` above, we try to avoid a run-time check
+ // (the alignment comparison will usually be performed at compile-time).
+ if mem::align_of::<K>() > mem::align_of::<LeafNode<(), ()>>() && self.is_shared_root() {
+ &mut []
+ } else {
+ unsafe {
+ slice::from_raw_parts_mut(
+ MaybeUninit::slice_as_mut_ptr(&mut (*self.as_leaf_mut()).keys),
+ self.len(),
+ )
+ }
+ }
+ }
+
+ fn into_val_slice_mut(mut self) -> &'a mut [V] {
+ debug_assert!(!self.is_shared_root());
+ unsafe {
+ slice::from_raw_parts_mut(
+ MaybeUninit::slice_as_mut_ptr(&mut (*self.as_leaf_mut()).vals),
+ self.len(),
+ )
+ }
+ }
+
+ fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
+ debug_assert!(!self.is_shared_root());
+ // We cannot use the getters here, because calling the second one
+ // invalidates the reference returned by the first.
+ // More precisely, it is the call to `len` that is the culprit,
+ // because that creates a shared reference to the header, which *can*
+ // overlap with the keys (and even the values, for ZST keys).
+ unsafe {
+ let len = self.len();
+ let leaf = self.as_leaf_mut();
+ let keys =
+ slice::from_raw_parts_mut(MaybeUninit::slice_as_mut_ptr(&mut (*leaf).keys), len);
+ let vals =
+ slice::from_raw_parts_mut(MaybeUninit::slice_as_mut_ptr(&mut (*leaf).vals), len);
+ (keys, vals)
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ /// Adds a key/value pair to the end of the node.
+ pub fn push(&mut self, key: K, val: V) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() < CAPACITY);
+ debug_assert!(!self.is_shared_root());
+
+ let idx = self.len();
+
+ unsafe {
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+
+ (*self.as_leaf_mut()).len += 1;
+ }
+ }
+
+ /// Adds a key/value pair to the beginning of the node.
+ pub fn push_front(&mut self, key: K, val: V) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() < CAPACITY);
+ debug_assert!(!self.is_shared_root());
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+
+ (*self.as_leaf_mut()).len += 1;
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Adds a key/value pair and an edge to go to the right of that pair to
+ /// the end of the node.
+ pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.height - 1);
+ debug_assert!(self.len() < CAPACITY);
+
+ let idx = self.len();
+
+ unsafe {
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+ self.as_internal_mut()
+ .edges
+ .get_unchecked_mut(idx + 1)
+ .write(edge.node);
+
+ (*self.as_leaf_mut()).len += 1;
+
+ Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
+ }
+ }
+
+ fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
+ for i in first..after_last {
+ Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
+ }
+ }
+
+ fn correct_all_childrens_parent_links(&mut self) {
+ let len = self.len();
+ self.correct_childrens_parent_links(0, len + 1);
+ }
+
+ /// Adds a key/value pair and an edge to go to the left of that pair to
+ /// the beginning of the node.
+ pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.height - 1);
+ debug_assert!(self.len() < CAPACITY);
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+ slice_insert(
+ slice::from_raw_parts_mut(
+ MaybeUninit::slice_as_mut_ptr(&mut self.as_internal_mut().edges),
+ self.len() + 1,
+ ),
+ 0,
+ edge.node,
+ );
+
+ (*self.as_leaf_mut()).len += 1;
+
+ self.correct_all_childrens_parent_links();
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Removes a key/value pair from the end of this node. If this is an internal node,
+ /// also removes the edge that was to the right of that pair.
+ pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() > 0);
+
+ let idx = self.len() - 1;
+
+ unsafe {
+ let key = ptr::read(self.keys().get_unchecked(idx));
+ let val = ptr::read(self.vals().get_unchecked(idx));
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(internal) => {
+ let edge =
+ ptr::read(internal.as_internal().edges.get_unchecked(idx + 1).as_ptr());
+ let mut new_root = Root {
+ node: edge,
+ height: internal.height - 1,
+ };
+ (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
+ Some(new_root)
+ }
+ };
+
+ (*self.as_leaf_mut()).len -= 1;
+ (key, val, edge)
+ }
+ }
+
+ /// Removes a key/value pair from the beginning of this node. If this is an internal node,
+ /// also removes the edge that was to the left of that pair.
+ pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() > 0);
+
+ let old_len = self.len();
+
+ unsafe {
+ let key = slice_remove(self.keys_mut(), 0);
+ let val = slice_remove(self.vals_mut(), 0);
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(mut internal) => {
+ let edge = slice_remove(
+ slice::from_raw_parts_mut(
+ MaybeUninit::slice_as_mut_ptr(&mut internal.as_internal_mut().edges),
+ old_len + 1,
+ ),
+ 0,
+ );
+
+ let mut new_root = Root {
+ node: edge,
+ height: internal.height - 1,
+ };
+ (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
+
+ for i in 0..old_len {
+ Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
+ }
+
+ Some(new_root)
+ }
+ };
+
+ (*self.as_leaf_mut()).len -= 1;
+
+ (key, val, edge)
+ }
+ }
+
+ fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
+ (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Checks whether a node is an `Internal` node or a `Leaf` node.
+ pub fn force(
+ self,
+ ) -> ForceResult<
+ NodeRef<BorrowType, K, V, marker::Leaf>,
+ NodeRef<BorrowType, K, V, marker::Internal>,
+ > {
+ if self.height == 0 {
+ ForceResult::Leaf(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ })
+ } else {
+ ForceResult::Internal(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ })
+ }
+ }
+}
+
+/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
+/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
+/// pair) or `Edge` (signifying a handle on an edge).
+///
+/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
+/// a child node, these represent the spaces where child pointers would go between the key/value
+/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
+/// to the left of the node, one between the two pairs, and one at the right of the node.
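+///
+/// As a sketch, for a node of length 2 the edges and pairs interleave like
+/// this (the numbers are the `idx` values a `Handle` would carry):
+///
+/// ```text
+/// edge 0 | kv 0 | edge 1 | kv 1 | edge 2
+/// ```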
+pub struct Handle<Node, Type> {
+ node: Node,
+ idx: usize,
+ _marker: PhantomData<Type>,
+}
+
+impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
+// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
+// `Clone`able is when it is an immutable reference and therefore `Copy`.
+impl<Node: Copy, Type> Clone for Handle<Node, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<Node, Type> Handle<Node, Type> {
+ /// Retrieves the node that contains the edge or key/value pair this handle points to.
+ pub fn into_node(self) -> Node {
+ self.node
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+ /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
+ pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ // Necessary for correctness, but in a private module
+ debug_assert!(idx < node.len());
+
+ Handle {
+ node,
+ idx,
+ _marker: PhantomData,
+ }
+ }
+
+ pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ Handle::new_edge(self.node, self.idx)
+ }
+
+ pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ Handle::new_edge(self.node, self.idx + 1)
+ }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.node.node == other.node.node && self.idx == other.idx
+ }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ /// Temporarily takes out another, immutable handle on the same location.
+ pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle {
+ node: self.node.reborrow(),
+ idx: self.idx,
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+ /// Temporarily takes out another, mutable handle on the same location. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+ pub unsafe fn reborrow_mut(
+ &mut self,
+ ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle {
+ node: self.node.reborrow_mut(),
+ idx: self.idx,
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
+ /// `node.len()`.
+ pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ // Necessary for correctness, but in a private module
+ debug_assert!(idx <= node.len());
+
+ Handle {
+ node,
+ idx,
+ _marker: PhantomData,
+ }
+ }
+
+ pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+ if self.idx > 0 {
+ Ok(Handle::new_kv(self.node, self.idx - 1))
+ } else {
+ Err(self)
+ }
+ }
+
+ pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+ if self.idx < self.node.len() {
+ Ok(Handle::new_kv(self.node, self.idx))
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method assumes that there is enough space in the node for the new
+ /// pair to fit.
+ ///
+ /// The returned pointer points to the inserted value.
+ fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+ // Necessary for correctness, but in a private module
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(!self.node.is_shared_root());
+
+ unsafe {
+ slice_insert(self.node.keys_mut(), self.idx, key);
+ slice_insert(self.node.vals_mut(), self.idx, val);
+
+ (*self.node.as_leaf_mut()).len += 1;
+
+ self.node.vals_mut().get_unchecked_mut(self.idx)
+ }
+ }
+
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method splits the node if there isn't enough room.
+ ///
+ /// The returned pointer points to the inserted value.
+ pub fn insert(
+ mut self,
+ key: K,
+ val: V,
+ ) -> Result<(InsertResult<'a, K, V, marker::Leaf>, *mut V), TryReserveError> {
+ if self.node.len() < CAPACITY {
+ let ptr = self.insert_fit(key, val);
+ Ok((InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr))
+ } else {
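+ // The node is full: split around the middle pair at index `B`, then
+ // insert into whichever half the edge index `self.idx` lands in.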
+ let middle = Handle::new_kv(self.node, B);
+ let (mut left, k, v, mut right) = middle.split()?;
+ let ptr = if self.idx <= B {
+ unsafe { Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) }
+ } else {
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Leaf>(),
+ self.idx - (B + 1),
+ )
+ .insert_fit(key, val)
+ }
+ };
+ Ok((InsertResult::Split(left, k, v, right), ptr))
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ /// Fixes the parent pointer and index in the child node below this edge. This is useful
+ /// when the ordering of edges has been changed, such as in the various `insert` methods.
+ fn correct_parent_link(mut self) {
+ let idx = self.idx as u16;
+ let ptr = self.node.as_internal_mut() as *mut _;
+ let mut child = self.descend();
+ unsafe {
+ (*child.as_leaf_mut()).parent = ptr;
+ (*child.as_leaf_mut()).parent_idx.write(idx);
+ }
+ }
+
+ /// Unsafely asserts to the compiler some static information about whether the underlying
+ /// node of this handle is a `Leaf`.
+ unsafe fn cast_unchecked<NewType>(
+ &mut self,
+ ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
+ Handle::new_edge(self.node.cast_unchecked(), self.idx)
+ }
+
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method assumes
+ /// that there is enough space in the node for the new pair to fit.
+ fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but in an internal module
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(edge.height == self.node.height - 1);
+
+ unsafe {
+ // This cast is a lie, but it allows us to reuse the key/value insertion logic.
+ self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
+
+ slice_insert(
+ slice::from_raw_parts_mut(
+ MaybeUninit::slice_as_mut_ptr(&mut self.node.as_internal_mut().edges),
+ self.node.len(),
+ ),
+ self.idx + 1,
+ edge.node,
+ );
+
+ for i in (self.idx + 1)..(self.node.len() + 1) {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ }
+ }
+
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method splits
+ /// the node if there isn't enough room.
+ pub fn insert(
+ mut self,
+ key: K,
+ val: V,
+ edge: Root<K, V>,
+ ) -> Result<InsertResult<'a, K, V, marker::Internal>, TryReserveError> {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.node.height - 1);
+
+ if self.node.len() < CAPACITY {
+ self.insert_fit(key, val, edge);
+ Ok(InsertResult::Fit(Handle::new_kv(self.node, self.idx)))
+ } else {
+ let middle = Handle::new_kv(self.node, B);
+ let (mut left, k, v, mut right) = middle.split()?;
+ if self.idx <= B {
+ unsafe {
+ Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
+ }
+ } else {
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Internal>(),
+ self.idx - (B + 1),
+ )
+ .insert_fit(key, val, edge);
+ }
+ }
+ Ok(InsertResult::Split(left, k, v, right))
+ }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+ /// Finds the node pointed to by this edge.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.node.height - 1,
+ node: unsafe {
+ (&*self
+ .node
+ .as_internal()
+ .edges
+ .get_unchecked(self.idx)
+ .as_ptr())
+ .as_ptr()
+ },
+ root: self.node.root,
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn into_kv(self) -> (&'a K, &'a V) {
+ let (keys, vals) = self.node.into_slices();
+ unsafe { (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx)) }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
+ let (keys, vals) = self.node.into_slices_mut();
+ unsafe {
+ (
+ keys.get_unchecked_mut(self.idx),
+ vals.get_unchecked_mut(self.idx),
+ )
+ }
+ }
+}
+
+impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
+ unsafe {
+ let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
+ (
+ keys.get_unchecked_mut(self.idx),
+ vals.get_unchecked_mut(self.idx),
+ )
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the key/value pairs to the left of
+ /// this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the key/value pairs to the right of this handle are put into a newly
+ /// allocated node.
+ pub fn split(
+ mut self,
+ ) -> Result<
+ (
+ NodeRef<marker::Mut<'a>, K, V, marker::Leaf>,
+ K,
+ V,
+ Root<K, V>,
+ ),
+ TryReserveError,
+ > {
+ debug_assert!(!self.node.is_shared_root());
+ unsafe {
+ let mut new_node = <Box<_> as FallibleBox<_>>::try_new(LeafNode::new())?;
+
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().add(self.idx + 1),
+ new_node.keys.as_mut_ptr() as *mut K,
+ new_len,
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().add(self.idx + 1),
+ new_node.vals.as_mut_ptr() as *mut V,
+ new_len,
+ );
+
+ (*self.node.as_leaf_mut()).len = self.idx as u16;
+ new_node.len = new_len as u16;
+
+ Ok((
+ self.node,
+ k,
+ v,
+ Root {
+ node: BoxedNode::from_leaf(new_node),
+ height: 0,
+ },
+ ))
+ }
+ }
+
+ /// Removes the key/value pair pointed to by this handle, returning the edge between the
+ /// now adjacent key/value pairs to the left and right of this handle.
+ pub fn remove(
+ mut self,
+ ) -> (
+ Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ K,
+ V,
+ ) {
+ debug_assert!(!self.node.is_shared_root());
+ unsafe {
+ let k = slice_remove(self.node.keys_mut(), self.idx);
+ let v = slice_remove(self.node.vals_mut(), self.idx);
+ (*self.node.as_leaf_mut()).len -= 1;
+ (self.left_edge(), k, v)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the edges and key/value pairs to the
+ /// left of this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the edges and key/value pairs to the right of this handle are put into
+ /// a newly allocated node.
+ pub fn split(
+ mut self,
+ ) -> Result<
+ (
+ NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
+ K,
+ V,
+ Root<K, V>,
+ ),
+ TryReserveError,
+ > {
+ unsafe {
+ let mut new_node = <Box<_> as FallibleBox<_>>::try_new(InternalNode::new())?;
+
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ let height = self.node.height;
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().add(self.idx + 1),
+ new_node.data.keys.as_mut_ptr() as *mut K,
+ new_len,
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().add(self.idx + 1),
+ new_node.data.vals.as_mut_ptr() as *mut V,
+ new_len,
+ );
+ ptr::copy_nonoverlapping(
+ self.node.as_internal().edges.as_ptr().add(self.idx + 1),
+ new_node.edges.as_mut_ptr(),
+ new_len + 1,
+ );
+
+ (*self.node.as_leaf_mut()).len = self.idx as u16;
+ new_node.data.len = new_len as u16;
+
+ let mut new_root = Root {
+ node: BoxedNode::from_internal(new_node),
+ height,
+ };
+
+ for i in 0..(new_len + 1) {
+ Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
+ }
+
+ Ok((self.node, k, v, new_root))
+ }
+ }
+
+ /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
+ /// a node to hold the combination of the nodes to the left and right of this handle along
+ /// with the key/value pair at this handle.
+ pub fn can_merge(&self) -> bool {
+ (self.reborrow().left_edge().descend().len()
+ + self.reborrow().right_edge().descend().len()
+ + 1)
+ <= CAPACITY
+ }
+
+ /// Combines the node immediately to the left of this handle, the key/value pair pointed
+ /// to by this handle, and the node immediately to the right of this handle into one new
+ /// child of the underlying node, returning an edge referencing that new child.
+ ///
+ /// Assumes that this edge `.can_merge()`.
+ pub fn merge(
+ mut self,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ let self1 = unsafe { ptr::read(&self) };
+ let self2 = unsafe { ptr::read(&self) };
+ let mut left_node = self1.left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = self2.right_edge().descend();
+ let right_len = right_node.len();
+
+ // necessary for correctness, but in a private module
+ debug_assert!(left_len + right_len + 1 <= CAPACITY);
+
+ unsafe {
+ ptr::write(
+ left_node.keys_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.keys_mut(), self.idx),
+ );
+ ptr::copy_nonoverlapping(
+ right_node.keys().as_ptr(),
+ left_node.keys_mut().as_mut_ptr().add(left_len + 1),
+ right_len,
+ );
+ ptr::write(
+ left_node.vals_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.vals_mut(), self.idx),
+ );
+ ptr::copy_nonoverlapping(
+ right_node.vals().as_ptr(),
+ left_node.vals_mut().as_mut_ptr().add(left_len + 1),
+ right_len,
+ );
+
+ slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
+ for i in self.idx + 1..self.node.len() {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ (*self.node.as_leaf_mut()).len -= 1;
+
+ (*left_node.as_leaf_mut()).len += right_len as u16 + 1;
+
+ if self.node.height > 1 {
+ ptr::copy_nonoverlapping(
+ right_node.cast_unchecked().as_internal().edges.as_ptr(),
+ left_node
+ .cast_unchecked()
+ .as_internal_mut()
+ .edges
+ .as_mut_ptr()
+ .add(left_len + 1),
+ right_len + 1,
+ );
+
+ for i in left_len + 1..left_len + right_len + 2 {
+ Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i)
+ .correct_parent_link();
+ }
+
+ Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
+ } else {
+ Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
+ }
+
+ Handle::new_edge(self.node, self.idx)
+ }
+ }
+
+ /// This removes a key/value pair from the left child and replaces it with the key/value pair
+ /// pointed to by this handle while pushing the old key/value pair of this handle into the right
+ /// child.
+ pub fn steal_left(&mut self) {
+ unsafe {
+ let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
+
+ let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+ let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+ match self.reborrow_mut().right_edge().descend().force() {
+ ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
+ ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()),
+ }
+ }
+ }
+
+ /// This removes a key/value pair from the right child and replaces it with the key/value pair
+ /// pointed to by this handle while pushing the old key/value pair of this handle into the left
+ /// child.
+ pub fn steal_right(&mut self) {
+ unsafe {
+ let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
+
+ let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+ let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+ match self.reborrow_mut().left_edge().descend().force() {
+ ForceResult::Leaf(mut leaf) => leaf.push(k, v),
+ ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()),
+ }
+ }
+ }
+
+ /// This does stealing similar to `steal_left` but steals multiple elements at once.
+ pub fn bulk_steal_left(&mut self, count: usize) {
+ unsafe {
+ let mut left_node = ptr::read(self).left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = ptr::read(self).right_edge().descend();
+ let right_len = right_node.len();
+
+ // Make sure that we may steal safely.
+ debug_assert!(right_len + count <= CAPACITY);
+ debug_assert!(left_len >= count);
+
+ let new_left_len = left_len - count;
+
+ // Move data.
+ {
+ let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+ let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+ let parent_kv = {
+ let kv = self.reborrow_mut().into_kv_mut();
+ (kv.0 as *mut K, kv.1 as *mut V)
+ };
+
+ // Make room for stolen elements in the right child.
+ ptr::copy(right_kv.0, right_kv.0.add(count), right_len);
+ ptr::copy(right_kv.1, right_kv.1.add(count), right_len);
+
+ // Move elements from the left child to the right one.
+ move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
+
+ // Move parent's key/value pair to the right child.
+ move_kv(parent_kv, 0, right_kv, count - 1, 1);
+
+ // Move the left-most stolen pair to the parent.
+ move_kv(left_kv, new_left_len, parent_kv, 0, 1);
+ }
+
+ (*left_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
+ (*right_node.reborrow_mut().as_leaf_mut()).len += count as u16;
+
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+ // Make room for stolen edges.
+ let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+ ptr::copy(right_edges, right_edges.add(count), right_len + 1);
+ right.correct_childrens_parent_links(count, count + right_len + 1);
+
+ move_edges(left, new_left_len + 1, right, 0, count);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => {
+ unreachable!();
+ }
+ }
+ }
+ }
+
+ /// The symmetric clone of `bulk_steal_left`.
+ pub fn bulk_steal_right(&mut self, count: usize) {
+ unsafe {
+ let mut left_node = ptr::read(self).left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = ptr::read(self).right_edge().descend();
+ let right_len = right_node.len();
+
+ // Make sure that we may steal safely.
+ debug_assert!(left_len + count <= CAPACITY);
+ debug_assert!(right_len >= count);
+
+ let new_right_len = right_len - count;
+
+ // Move data.
+ {
+ let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+ let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+ let parent_kv = {
+ let kv = self.reborrow_mut().into_kv_mut();
+ (kv.0 as *mut K, kv.1 as *mut V)
+ };
+
+ // Move parent's key/value pair to the left child.
+ move_kv(parent_kv, 0, left_kv, left_len, 1);
+
+ // Move elements from the right child to the left one.
+ move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
+
+ // Move the right-most stolen pair to the parent.
+ move_kv(right_kv, count - 1, parent_kv, 0, 1);
+
+ // Fix right indexing
+ ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len);
+ ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len);
+ }
+
+ (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16;
+ (*right_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
+
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+ move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
+
+ // Fix right indexing.
+ let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+ ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
+ right.correct_childrens_parent_links(0, new_right_len + 1);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => {
+ unreachable!();
+ }
+ }
+ }
+ }
+}
+
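+// Copies `count` key/value pairs from `source[source_offset..]` to
+// `dest[dest_offset..]`. The regions must not overlap; ownership of the moved
+// pairs transfers to `dest`, and the vacated source slots become logically
+// uninitialized (callers fix up the length fields themselves).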
+unsafe fn move_kv<K, V>(
+ source: (*mut K, *mut V),
+ source_offset: usize,
+ dest: (*mut K, *mut V),
+ dest_offset: usize,
+ count: usize,
+) {
+ ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
+ ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
+}
+
+// Source and destination must have the same height.
+unsafe fn move_edges<K, V>(
+ mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+ source_offset: usize,
+ mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+ dest_offset: usize,
+ count: usize,
+) {
+ let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
+ let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
+ ptr::copy_nonoverlapping(
+ source_ptr.add(source_offset),
+ dest_ptr.add(dest_offset),
+ count,
+ );
+ dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+}
+
+impl<BorrowType, K, V, HandleType>
+ Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType>
+{
+ /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
+ pub fn force(
+ self,
+ ) -> ForceResult<
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>,
+ > {
+ match self.node.force() {
+ ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
+ node,
+ idx: self.idx,
+ _marker: PhantomData,
+ }),
+ ForceResult::Internal(node) => ForceResult::Internal(Handle {
+ node,
+ idx: self.idx,
+ _marker: PhantomData,
+ }),
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+ /// Moves the suffix after `self` from one node to another one. `right` must be empty.
+ /// The first edge of `right` remains unchanged.
+ pub fn move_suffix(
+ &mut self,
+ right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+ ) {
+ unsafe {
+ let left_new_len = self.idx;
+ let mut left_node = self.reborrow_mut().into_node();
+
+ let right_new_len = left_node.len() - left_new_len;
+ let mut right_node = right.reborrow_mut();
+
+ debug_assert!(right_node.len() == 0);
+ debug_assert!(left_node.height == right_node.height);
+
+ let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+ let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+
+ move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
+
+ (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16;
+ (*right_node.reborrow_mut().as_leaf_mut()).len = right_new_len as u16;
+
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(left), ForceResult::Internal(right)) => {
+ move_edges(left, left_new_len + 1, right, 1, right_new_len);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => {
+ unreachable!();
+ }
+ }
+ }
+ }
+}
+
+pub enum ForceResult<Leaf, Internal> {
+ Leaf(Leaf),
+ Internal(Internal),
+}
+
+pub enum InsertResult<'a, K, V, Type> {
+ Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
+ Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>),
+}
+
+pub mod marker {
+ use core::marker::PhantomData;
+
+ pub enum Leaf {}
+ pub enum Internal {}
+ pub enum LeafOrInternal {}
+
+ pub enum Owned {}
+ pub struct Immut<'a>(PhantomData<&'a ()>);
+ pub struct Mut<'a>(PhantomData<&'a mut ()>);
+
+ pub enum KV {}
+ pub enum Edge {}
+}
+
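+// Shifts `slice[idx..]` one slot to the right and writes `val` at `idx`. The
+// caller must guarantee one element of spare capacity past `slice.len()`,
+// which is why this is `unsafe`; e.g. inserting `x` at index 1 of `[a, b, c]`
+// (with room for four) yields `[a, x, b, c]`.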
+unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
+ ptr::copy(
+ slice.as_ptr().add(idx),
+ slice.as_mut_ptr().add(idx + 1),
+ slice.len() - idx,
+ );
+ ptr::write(slice.get_unchecked_mut(idx), val);
+}
+
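+// Reads `slice[idx]` out by value and shifts the tail left to close the gap;
+// the caller is expected to decrement the node's length afterwards, so the
+// bitwise duplicate left in the last slot is never observed.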
+unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
+ let ret = ptr::read(slice.get_unchecked(idx));
+ ptr::copy(
+ slice.as_ptr().add(idx + 1),
+ slice.as_mut_ptr().add(idx),
+ slice.len() - idx - 1,
+ );
+ ret
+}
diff --git a/third_party/rust/fallible_collections/src/btree/search.rs b/third_party/rust/fallible_collections/src/btree/search.rs
new file mode 100644
index 0000000000..0031fdc29c
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/btree/search.rs
@@ -0,0 +1,66 @@
+use core::borrow::Borrow;
+
+use core::cmp::Ordering;
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+
+use SearchResult::*;
+
+pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
+ Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
+ GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
+}
+
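+// Walks from `node` down to a leaf, returning either a KV handle for an exact
+// match or the leaf edge where `key` would be inserted.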
+pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
+ mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ key: &Q,
+) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ loop {
+ match search_node(node, key) {
+ Found(handle) => return Found(handle),
+ GoDown(handle) => match handle.force() {
+ Leaf(leaf) => return GoDown(leaf),
+ Internal(internal) => {
+ node = internal.descend();
+ continue;
+ }
+ },
+ }
+ }
+}
+
+pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
+ node: NodeRef<BorrowType, K, V, Type>,
+ key: &Q,
+) -> SearchResult<BorrowType, K, V, Type, Type>
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ match search_linear(&node, key) {
+ (idx, true) => Found(Handle::new_kv(node, idx)),
+ (idx, false) => SearchResult::GoDown(Handle::new_edge(node, idx)),
+ }
+}
+
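+// Linear scan over the node's keys: `(i, true)` marks an exact match at index
+// `i`, while `(i, false)` names the edge to descend into. For keys `[3, 7]`
+// and `key == 5`, the result is `(1, false)`: go down between 3 and 7.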
+pub fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
+ node: &NodeRef<BorrowType, K, V, Type>,
+ key: &Q,
+) -> (usize, bool)
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ for (i, k) in node.keys().iter().enumerate() {
+ match key.cmp(k.borrow()) {
+ Ordering::Greater => {}
+ Ordering::Equal => return (i, true),
+ Ordering::Less => return (i, false),
+ }
+ }
+ (node.keys().len(), false)
+}
diff --git a/third_party/rust/fallible_collections/src/btree/set.rs b/third_party/rust/fallible_collections/src/btree/set.rs
new file mode 100644
index 0000000000..c6112ee6cd
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/btree/set.rs
@@ -0,0 +1,1346 @@
+// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
+// to TreeMap
+
+use crate::TryReserveError;
+use core::borrow::Borrow;
+use core::cmp::max;
+use core::cmp::Ordering::{self, Equal, Greater, Less};
+use core::fmt::{self, Debug};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
+
+use super::map::{self, BTreeMap, Keys};
+use super::Recover;
+
+// FIXME(conventions): implement bounded iterators
+
+/// A set based on a B-Tree.
+///
+/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
+/// benefits and drawbacks.
+///
+/// It is a logic error for an item to be modified in such a way that the item's ordering relative
+/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`Ord`]: ../../std/cmp/trait.Ord.html
+/// [`Cell`]: ../../std/cell/struct.Cell.html
+/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeSet;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BTreeSet<&str>` in this example).
+/// let mut books = BTreeSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons");
+/// books.insert("To Kill a Mockingbird");
+/// books.insert("The Odyssey");
+/// books.insert("The Great Gatsby");
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+/// println!("We have {} books, but The Winds of Winter ain't one.",
+/// books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+/// println!("{}", book);
+/// }
+/// ```
+#[derive(Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]
+pub struct BTreeSet<T> {
+ map: BTreeMap<T, ()>,
+}
+
+/// An iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`iter`]: struct.BTreeSet.html#method.iter
+
+pub struct Iter<'a, T: 'a> {
+ iter: Keys<'a, T, ()>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Iter").field(&self.iter.clone()).finish()
+ }
+}
+
+/// An owning iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`][`BTreeSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`into_iter`]: struct.BTreeSet.html#method.into_iter
+
+#[derive(Debug)]
+pub struct IntoIter<T> {
+ iter: map::IntoIter<T, ()>,
+}
+
+/// An iterator over a sub-range of items in a `BTreeSet`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`range`]: struct.BTreeSet.html#method.range
+#[derive(Debug)]
+pub struct Range<'a, T: 'a> {
+ iter: map::Range<'a, T, ()>,
+}
+
+/// A lazy iterator producing elements in the difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`difference`]: struct.BTreeSet.html#method.difference
+
+pub struct Difference<'a, T: 'a> {
+ inner: DifferenceInner<'a, T>,
+}
+enum DifferenceInner<'a, T: 'a> {
+ Stitch {
+ self_iter: Iter<'a, T>,
+ other_iter: Peekable<Iter<'a, T>>,
+ },
+ Search {
+ self_iter: Iter<'a, T>,
+ other_set: &'a BTreeSet<T>,
+ },
+}
+
+impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.inner {
+ DifferenceInner::Stitch {
+ self_iter,
+ other_iter,
+ } => f
+ .debug_tuple("Difference")
+ .field(&self_iter)
+ .field(&other_iter)
+ .finish(),
+ DifferenceInner::Search {
+ self_iter,
+ other_set: _,
+ } => f.debug_tuple("Difference").field(&self_iter).finish(),
+ }
+ }
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`BTreeSet`]. See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference
+
+pub struct SymmetricDifference<'a, T: 'a> {
+ a: Peekable<Iter<'a, T>>,
+ b: Peekable<Iter<'a, T>>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("SymmetricDifference")
+ .field(&self.a)
+ .field(&self.b)
+ .finish()
+ }
+}
+
+/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`intersection`]: struct.BTreeSet.html#method.intersection
+
+pub struct Intersection<'a, T: 'a> {
+ inner: IntersectionInner<'a, T>,
+}
+enum IntersectionInner<'a, T: 'a> {
+ Stitch {
+ small_iter: Iter<'a, T>, // for size_hint, should be the smaller of the sets
+ other_iter: Iter<'a, T>,
+ },
+ Search {
+ small_iter: Iter<'a, T>,
+ large_set: &'a BTreeSet<T>,
+ },
+}
+
+impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.inner {
+ IntersectionInner::Stitch {
+ small_iter,
+ other_iter,
+ } => f
+ .debug_tuple("Intersection")
+ .field(&small_iter)
+ .field(&other_iter)
+ .finish(),
+ IntersectionInner::Search {
+ small_iter,
+ large_set: _,
+ } => f.debug_tuple("Intersection").field(&small_iter).finish(),
+ }
+ }
+}
+
+/// A lazy iterator producing elements in the union of `BTreeSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`BTreeSet`]: struct.BTreeSet.html
+/// [`union`]: struct.BTreeSet.html#method.union
+
+pub struct Union<'a, T: 'a> {
+ a: Peekable<Iter<'a, T>>,
+ b: Peekable<Iter<'a, T>>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Union")
+ .field(&self.a)
+ .field(&self.b)
+ .finish()
+ }
+}
+
+// This constant is used by functions that compare two sets.
+// It estimates the relative size at which searching performs better
+// than iterating, based on the benchmarks in
+// https://github.com/ssomers/rust_bench_btreeset_intersection.
+// It's used to divide rather than multiply sizes, to rule out overflow,
+// and it's a power of two to make that division cheap.
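+// For example, `difference` on a 10-element set against a 200-element one
+// computes `200 / 16 == 12`; since `10 > 12` is false, it searches the large
+// set per element instead of stitching both iterators.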
+const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16;
+
+impl<T: Ord> BTreeSet<T> {
+ /// Makes a new `BTreeSet` with a reasonable choice of B.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<i32> = BTreeSet::new();
+ /// ```
+
+ #[inline]
+ pub fn new() -> BTreeSet<T> {
+ BTreeSet {
+ map: BTreeMap::new(),
+ }
+ }
+
+ /// Constructs a double-ended iterator over a sub-range of elements in the set.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ /// use std::ops::Bound::Included;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(3);
+ /// set.insert(5);
+ /// set.insert(8);
+ /// for &elem in set.range((Included(&4), Included(&8))) {
+ /// println!("{}", elem);
+ /// }
+ /// assert_eq!(Some(&5), set.range(4..).next());
+ /// ```
+
+ #[inline]
+ pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
+ where
+ K: Ord,
+ T: Borrow<K>,
+ R: RangeBounds<K>,
+ {
+ Range {
+ iter: self.map.range(range),
+ }
+ }
+
+ /// Visits the values representing the difference,
+ /// i.e., the values that are in `self` but not in `other`,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let diff: Vec<_> = a.difference(&b).cloned().collect();
+ /// assert_eq!(diff, [1]);
+ /// ```
+
+ pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
+ if self.len() > other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
+ // Self is bigger than or not much smaller than other set.
+ // Iterate both sets jointly, spotting matches along the way.
+ Difference {
+ inner: DifferenceInner::Stitch {
+ self_iter: self.iter(),
+ other_iter: other.iter().peekable(),
+ },
+ }
+ } else {
+ // Self is much smaller than other set, or both sets are empty.
+ // Iterate the small set, searching for matches in the large set.
+ Difference {
+ inner: DifferenceInner::Search {
+ self_iter: self.iter(),
+ other_set: other,
+ },
+ }
+ }
+ }
+
+ /// Visits the values representing the symmetric difference,
+ /// i.e., the values that are in `self` or in `other` but not in both,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
+ /// assert_eq!(sym_diff, [1, 3]);
+ /// ```
+
+ #[inline]
+ pub fn symmetric_difference<'a>(
+ &'a self,
+ other: &'a BTreeSet<T>,
+ ) -> SymmetricDifference<'a, T> {
+ SymmetricDifference {
+ a: self.iter().peekable(),
+ b: other.iter().peekable(),
+ }
+ }
+
+ /// Visits the values representing the intersection,
+ /// i.e., the values that are both in `self` and `other`,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
+ /// assert_eq!(intersection, [2]);
+ /// ```
+
+ pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T> {
+ let (small, other) = if self.len() <= other.len() {
+ (self, other)
+ } else {
+ (other, self)
+ };
+ if small.len() > other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
+ // Small set is not much smaller than other set.
+ // Iterate both sets jointly, spotting matches along the way.
+ Intersection {
+ inner: IntersectionInner::Stitch {
+ small_iter: small.iter(),
+ other_iter: other.iter(),
+ },
+ }
+ } else {
+ // Big difference in number of elements, or both sets are empty.
+ // Iterate the small set, searching for matches in the large set.
+ Intersection {
+ inner: IntersectionInner::Search {
+ small_iter: small.iter(),
+ large_set: other,
+ },
+ }
+ }
+ }
+
+ /// Visits the values representing the union,
+ /// i.e., all the values in `self` or `other`, without duplicates,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ ///
+ /// let union: Vec<_> = a.union(&b).cloned().collect();
+ /// assert_eq!(union, [1, 2]);
+ /// ```
+
+ #[inline]
+ pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
+ Union {
+ a: self.iter().peekable(),
+ b: other.iter().peekable(),
+ }
+ }
+
+ /// Clears the set, removing all values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// v.insert(1);
+ /// v.clear();
+ /// assert!(v.is_empty());
+ /// ```
+
+ #[inline(always)]
+ pub fn clear(&mut self) {
+ self.map.clear()
+ }
+
+ /// Returns `true` if the set contains a value.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert_eq!(set.contains(&1), true);
+ /// assert_eq!(set.contains(&4), false);
+ /// ```
+
+ #[inline(always)]
+ pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ self.map.contains_key(value)
+ }
+
+ /// Returns a reference to the value in the set, if any, that is equal to the given value.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert_eq!(set.get(&2), Some(&2));
+ /// assert_eq!(set.get(&4), None);
+ /// ```
+
+ #[inline(always)]
+ pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ Recover::get(&self.map, value)
+ }
+
+ /// Returns `true` if `self` has no elements in common with `other`.
+ /// This is equivalent to checking for an empty intersection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// let mut b = BTreeSet::new();
+ ///
+ /// assert_eq!(a.is_disjoint(&b), true);
+ /// b.insert(4);
+ /// assert_eq!(a.is_disjoint(&b), true);
+ /// b.insert(1);
+ /// assert_eq!(a.is_disjoint(&b), false);
+ /// ```
+
+ #[inline]
+ pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool {
+ self.intersection(other).next().is_none()
+ }
+
+ /// Returns `true` if the set is a subset of another,
+ /// i.e., `other` contains at least all the values in `self`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.is_subset(&sup), true);
+ /// set.insert(2);
+ /// assert_eq!(set.is_subset(&sup), true);
+ /// set.insert(4);
+ /// assert_eq!(set.is_subset(&sup), false);
+ /// ```
+
+ pub fn is_subset(&self, other: &BTreeSet<T>) -> bool {
+ // Same result as self.difference(other).next().is_none()
+ // but the 3 paths below are faster (in order: hugely, 20%, 5%).
+ if self.len() > other.len() {
+ false
+ } else if self.len() > other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
+ // Self is not much smaller than other set.
+ // Stolen from TreeMap
+ let mut x = self.iter();
+ let mut y = other.iter();
+ let mut a = x.next();
+ let mut b = y.next();
+ while a.is_some() {
+ if b.is_none() {
+ return false;
+ }
+
+ let a1 = a.unwrap();
+ let b1 = b.unwrap();
+
+ match b1.cmp(a1) {
+ Less => (),
+ Greater => return false,
+ Equal => a = x.next(),
+ }
+
+ b = y.next();
+ }
+ true
+ } else {
+ // Big difference in number of elements, or both sets are empty.
+ // Iterate the small set, searching for matches in the large set.
+ for next in self {
+ if !other.contains(next) {
+ return false;
+ }
+ }
+ true
+ }
+ }
+
+ /// Returns `true` if the set is a superset of another,
+ /// i.e., `self` contains at least all the values in `other`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.is_superset(&sub), false);
+ ///
+ /// set.insert(0);
+ /// set.insert(1);
+ /// assert_eq!(set.is_superset(&sub), false);
+ ///
+ /// set.insert(2);
+ /// assert_eq!(set.is_superset(&sub), true);
+ /// ```
+
+ #[inline(always)]
+ pub fn is_superset(&self, other: &BTreeSet<T>) -> bool {
+ other.is_subset(self)
+ }
+
+ /// Adds a value to the set.
+ ///
+ /// If the set did not have this value present, `Ok(true)` is returned.
+ ///
+ /// If the set did have this value present, `Ok(false)` is returned, and the
+ /// entry is not updated; allocation failure yields a `TryReserveError`.
+ /// See the [module-level documentation] for more.
+ ///
+ /// [module-level documentation]: index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.insert(2), true);
+ /// assert_eq!(set.insert(2), false);
+ /// assert_eq!(set.len(), 1);
+ /// ```
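+ ///
+ /// A sketch of the fallible call itself (the import path is assumed here):
+ ///
+ /// ```ignore
+ /// use fallible_collections::btree::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// assert_eq!(set.try_insert(2).unwrap(), true);
+ /// assert_eq!(set.try_insert(2).unwrap(), false);
+ /// ```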
+
+ #[inline]
+ pub fn try_insert(&mut self, value: T) -> Result<bool, TryReserveError> {
+ Ok(self.map.try_insert(value, ())?.is_none())
+ }
+
+ /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+ /// one. Returns the replaced value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(Vec::<i32>::new());
+ ///
+ /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+ /// set.replace(Vec::with_capacity(10));
+ /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+ /// ```
+
+ #[inline]
+ pub fn replace(&mut self, value: T) -> Result<Option<T>, TryReserveError> {
+ Ok(Recover::replace(&mut self.map, value)?)
+ }
+
+ /// Removes a value from the set. Returns whether the value was
+ /// present in the set.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(2);
+ /// assert_eq!(set.remove(&2), true);
+ /// assert_eq!(set.remove(&2), false);
+ /// ```
+
+ #[inline(always)]
+ pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ self.map.remove(value).is_some()
+ }
+
+ /// Removes and returns the value in the set, if any, that is equal to the given one.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert_eq!(set.take(&2), Some(2));
+ /// assert_eq!(set.take(&2), None);
+ /// ```
+
+ #[inline(always)]
+ pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ Recover::take(&mut self.map, value)
+ }
+
+ /// Moves all elements from `other` into `self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ /// a.insert(3);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(3);
+ /// b.insert(4);
+ /// b.insert(5);
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.len(), 5);
+ /// assert_eq!(b.len(), 0);
+ ///
+ /// assert!(a.contains(&1));
+ /// assert!(a.contains(&2));
+ /// assert!(a.contains(&3));
+ /// assert!(a.contains(&4));
+ /// assert!(a.contains(&5));
+ /// ```
+ #[inline(always)]
+ pub fn append(&mut self, other: &mut Self) {
+ self.map.append(&mut other.map);
+ }
+
+ /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key, or an error if allocation fails.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ /// a.insert(3);
+ /// a.insert(17);
+ /// a.insert(41);
+ ///
+ /// let b = a.split_off(&3);
+ ///
+ /// assert_eq!(a.len(), 2);
+ /// assert_eq!(b.len(), 3);
+ ///
+ /// assert!(a.contains(&1));
+ /// assert!(a.contains(&2));
+ ///
+ /// assert!(b.contains(&3));
+ /// assert!(b.contains(&17));
+ /// assert!(b.contains(&41));
+ /// ```
+ #[inline]
+ pub fn try_split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Result<Self, TryReserveError>
+ where
+ T: Borrow<Q>,
+ {
+ Ok(BTreeSet {
+ map: self.map.split_off(key)?,
+ })
+ }
+}
+
+impl<T> BTreeSet<T> {
+ /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
+ /// let mut set_iter = set.iter();
+ /// assert_eq!(set_iter.next(), Some(&1));
+ /// assert_eq!(set_iter.next(), Some(&2));
+ /// assert_eq!(set_iter.next(), Some(&3));
+ /// assert_eq!(set_iter.next(), None);
+ /// ```
+ ///
+ /// Values returned by the iterator are returned in ascending order:
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
+ /// let mut set_iter = set.iter();
+ /// assert_eq!(set_iter.next(), Some(&1));
+ /// assert_eq!(set_iter.next(), Some(&2));
+ /// assert_eq!(set_iter.next(), Some(&3));
+ /// assert_eq!(set_iter.next(), None);
+ /// ```
+ #[inline(always)]
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter {
+ iter: self.map.keys(),
+ }
+ }
+
+ /// Returns the number of elements in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// assert_eq!(v.len(), 0);
+ /// v.insert(1);
+ /// assert_eq!(v.len(), 1);
+ /// ```
+ #[inline(always)]
+ pub fn len(&self) -> usize {
+ self.map.len()
+ }
+
+ /// Returns `true` if the set contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// assert!(v.is_empty());
+ /// v.insert(1);
+ /// assert!(!v.is_empty());
+ /// ```
+ #[inline(always)]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl<T: Ord> FromIterator<T> for BTreeSet<T> {
+ #[inline]
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
+ let mut set = BTreeSet::new();
+ set.extend(iter);
+ set
+ }
+}
+
+impl<T> IntoIterator for BTreeSet<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Gets an iterator for moving out the `BTreeSet`'s contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
+ ///
+ /// let v: Vec<_> = set.into_iter().collect();
+ /// assert_eq!(v, [1, 2, 3, 4]);
+ /// ```
+ #[inline(always)]
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter {
+ iter: self.map.into_iter(),
+ }
+ }
+}
+
+impl<'a, T> IntoIterator for &'a BTreeSet<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ #[inline(always)]
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+impl<T: Ord> Extend<T> for BTreeSet<T> {
+ #[inline]
+ fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
+ iter.into_iter().for_each(move |elem| {
+ self.try_insert(elem).expect("Out of Mem");
+ });
+ }
+}
+
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+}
+
+impl<T: Ord> Default for BTreeSet<T> {
+ /// Makes an empty `BTreeSet<T>` with a reasonable choice of B.
+ #[inline(always)]
+ fn default() -> BTreeSet<T> {
+ BTreeSet::new()
+ }
+}
+
+impl<T: Ord + Clone> Sub<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+ ///
+ /// let result = &a - &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [1, 2]);
+ /// ```
+ fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.difference(rhs).cloned().collect()
+ }
+}
+
+impl<T: Ord + Clone> BitXor<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+ ///
+ /// let result = &a ^ &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [1, 4]);
+ /// ```
+ fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.symmetric_difference(rhs).cloned().collect()
+ }
+}
+
+impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+ ///
+ /// let result = &a & &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [2, 3]);
+ /// ```
+ fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.intersection(rhs).cloned().collect()
+ }
+}
+
+impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+ ///
+ /// let result = &a | &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
+ /// ```
+ fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.union(rhs).cloned().collect()
+ }
+}
+
+impl<T: Debug> Debug for BTreeSet<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+impl<T> Clone for Iter<'_, T> {
+ #[inline(always)]
+ fn clone(&self) -> Self {
+ Iter {
+ iter: self.iter.clone(),
+ }
+ }
+}
+
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ #[inline(always)]
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next()
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ #[inline(always)]
+ fn next_back(&mut self) -> Option<&'a T> {
+ self.iter.next_back()
+ }
+}
+
+impl<T> ExactSizeIterator for Iter<'_, T> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.iter.len()
+ }
+}
+
+impl<T> FusedIterator for Iter<'_, T> {}
+
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.iter.next().map(|(k, _)| k)
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back().map(|(k, _)| k)
+ }
+}
+
+impl<T> ExactSizeIterator for IntoIter<T> {
+ #[inline(always)]
+ fn len(&self) -> usize {
+ self.iter.len()
+ }
+}
+
+impl<T> FusedIterator for IntoIter<T> {}
+
+impl<T> Clone for Range<'_, T> {
+ #[inline(always)]
+ fn clone(&self) -> Self {
+ Range {
+ iter: self.iter.clone(),
+ }
+ }
+}
+
+impl<'a, T> Iterator for Range<'a, T> {
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next().map(|(k, _)| k)
+ }
+}
+
+impl<'a, T> DoubleEndedIterator for Range<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a T> {
+ self.iter.next_back().map(|(k, _)| k)
+ }
+}
+
+impl<T> FusedIterator for Range<'_, T> {}
+
+/// Compares `x` and `y`, but returns `short` if `x` is `None` and `long` if `y` is `None`
+fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering {
+ match (x, y) {
+ (None, _) => short,
+ (_, None) => long,
+ (Some(x1), Some(y1)) => x1.cmp(y1),
+ }
+}
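+
+// For example, `Union::next` below calls
+// `cmp_opt(self.a.peek(), self.b.peek(), Greater, Less)`: once `a` is
+// exhausted it compares as `Greater`, so the remaining elements of `b`
+// keep being yielded, and symmetrically once `b` is exhausted.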
+
+impl<T> Clone for Difference<'_, T> {
+ fn clone(&self) -> Self {
+ Difference {
+ inner: match &self.inner {
+ DifferenceInner::Stitch {
+ self_iter,
+ other_iter,
+ } => DifferenceInner::Stitch {
+ self_iter: self_iter.clone(),
+ other_iter: other_iter.clone(),
+ },
+ DifferenceInner::Search {
+ self_iter,
+ other_set,
+ } => DifferenceInner::Search {
+ self_iter: self_iter.clone(),
+ other_set,
+ },
+ },
+ }
+ }
+}
+
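+// `Difference` iterates in one of two modes fixed when it is created:
+// `Stitch` advances both sorted iterators in lockstep, while `Search`
+// looks up each of `self`'s elements in the other set, which pays off
+// when that set is much larger.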
+impl<'a, T: Ord> Iterator for Difference<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ match &mut self.inner {
+ DifferenceInner::Stitch {
+ self_iter,
+ other_iter,
+ } => {
+ let mut self_next = self_iter.next()?;
+ loop {
+ match other_iter
+ .peek()
+ .map_or(Less, |other_next| Ord::cmp(self_next, other_next))
+ {
+ Less => return Some(self_next),
+ Equal => {
+ self_next = self_iter.next()?;
+ other_iter.next();
+ }
+ Greater => {
+ other_iter.next();
+ }
+ }
+ }
+ }
+ DifferenceInner::Search {
+ self_iter,
+ other_set,
+ } => loop {
+ let self_next = self_iter.next()?;
+ if !other_set.contains(&self_next) {
+ return Some(self_next);
+ }
+ },
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let (self_len, other_len) = match &self.inner {
+ DifferenceInner::Stitch {
+ self_iter,
+ other_iter,
+ } => (self_iter.len(), other_iter.len()),
+ DifferenceInner::Search {
+ self_iter,
+ other_set,
+ } => (self_iter.len(), other_set.len()),
+ };
+ (self_len.saturating_sub(other_len), Some(self_len))
+ }
+}
+
+impl<T: Ord> FusedIterator for Difference<'_, T> {}
+
+impl<T> Clone for SymmetricDifference<'_, T> {
+ fn clone(&self) -> Self {
+ SymmetricDifference {
+ a: self.a.clone(),
+ b: self.b.clone(),
+ }
+ }
+}
+
+impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ loop {
+ match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
+ Less => return self.a.next(),
+ Equal => {
+ self.a.next();
+ self.b.next();
+ }
+ Greater => return self.b.next(),
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, Some(self.a.len() + self.b.len()))
+ }
+}
+
+impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}
+
+impl<T> Clone for Intersection<'_, T> {
+ fn clone(&self) -> Self {
+ Intersection {
+ inner: match &self.inner {
+ IntersectionInner::Stitch {
+ small_iter,
+ other_iter,
+ } => IntersectionInner::Stitch {
+ small_iter: small_iter.clone(),
+ other_iter: other_iter.clone(),
+ },
+ IntersectionInner::Search {
+ small_iter,
+ large_set,
+ } => IntersectionInner::Search {
+ small_iter: small_iter.clone(),
+ large_set,
+ },
+ },
+ }
+ }
+}
+
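+// `Intersection` makes the same trade-off as `Difference`: `Stitch` zips
+// two sorted iterators, while `Search` probes the large set with each
+// element of the small iterator.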
+impl<'a, T: Ord> Iterator for Intersection<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ match &mut self.inner {
+ IntersectionInner::Stitch {
+ small_iter,
+ other_iter,
+ } => {
+ let mut small_next = small_iter.next()?;
+ let mut other_next = other_iter.next()?;
+ loop {
+ match Ord::cmp(small_next, other_next) {
+ Less => small_next = small_iter.next()?,
+ Greater => other_next = other_iter.next()?,
+ Equal => return Some(small_next),
+ }
+ }
+ }
+ IntersectionInner::Search {
+ small_iter,
+ large_set,
+ } => loop {
+ let small_next = small_iter.next()?;
+ if large_set.contains(&small_next) {
+ return Some(small_next);
+ }
+ },
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let min_len = match &self.inner {
+ IntersectionInner::Stitch { small_iter, .. } => small_iter.len(),
+ IntersectionInner::Search { small_iter, .. } => small_iter.len(),
+ };
+ (0, Some(min_len))
+ }
+}
+
+impl<T: Ord> FusedIterator for Intersection<'_, T> {}
+
+impl<T> Clone for Union<'_, T> {
+ #[inline]
+ fn clone(&self) -> Self {
+ Union {
+ a: self.a.clone(),
+ b: self.b.clone(),
+ }
+ }
+}
+
+impl<'a, T: Ord> Iterator for Union<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> {
+ match cmp_opt(self.a.peek(), self.b.peek(), Greater, Less) {
+ Less => self.a.next(),
+ Equal => {
+ self.b.next();
+ self.a.next()
+ }
+ Greater => self.b.next(),
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let a_len = self.a.len();
+ let b_len = self.b.len();
+ (max(a_len, b_len), Some(a_len + b_len))
+ }
+}
+
+impl<T: Ord> FusedIterator for Union<'_, T> {}
diff --git a/third_party/rust/fallible_collections/src/format.rs b/third_party/rust/fallible_collections/src/format.rs
new file mode 100644
index 0000000000..47a5fb9082
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/format.rs
@@ -0,0 +1,46 @@
+//! A try_format! macro replacing format!
+use super::FallibleVec;
+use crate::TryReserveError;
+use alloc::fmt::{Arguments, Write};
+use alloc::string::String;
+
+/// Takes a max capacity and tries to allocate a string with it.
+///
+/// # Warning:
+///
+/// The max capacity must be greater than the formatted length of the
+/// arguments. If writing the arguments to the string exceeds the
+/// capacity, no error is returned and an allocation can occur, which
+/// can lead to a panic.
+pub fn try_format(max_capacity: usize, args: Arguments<'_>) -> Result<String, TryReserveError> {
+ let v = FallibleVec::try_with_capacity(max_capacity)?;
+    let mut s = String::from_utf8(v).expect("an empty vec should be valid utf8");
+ s.write_fmt(args)
+ .expect("a formatting trait implementation returned an error");
+ Ok(s)
+}
+
+#[macro_export]
+/// Takes a max capacity and tries to allocate a string with it.
+///
+/// # Warning:
+///
+/// The max capacity must be greater than the formatted length of the
+/// arguments. If writing the arguments to the string exceeds the
+/// capacity, no error is returned and an allocation can occur, which
+/// can lead to a panic.
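+///
+/// A short usage sketch (mirroring the unit tests below):
+///
+/// ```
+/// let s = fallible_collections::tryformat!(3, "{}", 123).unwrap();
+/// assert_eq!(s, "123");
+/// ```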
+macro_rules! tryformat {
+ ($max_capacity:tt, $($arg:tt)*) => (
+ $crate::format::try_format($max_capacity, format_args!($($arg)*))
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ #[test]
+ fn format() {
+ assert_eq!(tryformat!(1, "1").unwrap(), format!("1"));
+ assert_eq!(tryformat!(1, "{}", 1).unwrap(), format!("{}", 1));
+ assert_eq!(tryformat!(3, "{}", 123).unwrap(), format!("{}", 123));
+ }
+}
diff --git a/third_party/rust/fallible_collections/src/hashmap.rs b/third_party/rust/fallible_collections/src/hashmap.rs
new file mode 100644
index 0000000000..b55361e7a7
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/hashmap.rs
@@ -0,0 +1,139 @@
+//! Implement Fallible HashMap
+use super::TryClone;
+use crate::TryReserveError;
+use core::borrow::Borrow;
+use core::default::Default;
+use core::fmt::Debug;
+use core::hash::Hash;
+
+#[cfg(not(all(feature = "std", feature = "rust_1_57")))]
+type HashMap<K, V> = hashbrown::hash_map::HashMap<K, V>;
+
+#[cfg(all(feature = "std", feature = "rust_1_57"))]
+type HashMap<K, V> = std::collections::HashMap<K, V>;
+
+#[cfg(not(all(feature = "std", feature = "rust_1_57")))]
+use hashbrown::hash_map::{Iter, IntoIter};
+
+#[cfg(all(feature = "std", feature = "rust_1_57"))]
+use std::collections::hash_map::{Iter, IntoIter};
+
+pub struct TryHashMap<K, V> {
+ inner: HashMap<K, V>,
+}
+
+impl<K, V> Default for TryHashMap<K, V> {
+ #[inline(always)]
+ fn default() -> Self {
+ Self {
+ inner: Default::default(),
+ }
+ }
+}
+
+impl<K: Debug, V: Debug> Debug for TryHashMap<K, V> {
+ #[inline]
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl<K, V> TryHashMap<K, V>
+where
+ K: Eq + Hash,
+{
+ #[inline]
+ pub fn with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
+ let mut map = Self {
+ inner: HashMap::new(),
+ };
+ map.reserve(capacity)?;
+ Ok(map)
+ }
+
+ #[inline(always)]
+ pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.inner.get(k)
+ }
+
+ #[inline]
+ pub fn insert(&mut self, k: K, v: V) -> Result<Option<V>, TryReserveError> {
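+        // Make room before inserting: reserve space for a few entries on
+        // first use, otherwise ensure capacity for at least one more.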
+ self.reserve(if self.inner.capacity() == 0 { 4 } else { 1 })?;
+ Ok(self.inner.insert(k, v))
+ }
+
+ #[inline(always)]
+ pub fn iter(&self) -> Iter<'_, K, V> {
+ self.inner.iter()
+ }
+
+ #[inline(always)]
+ pub fn len(&self) -> usize {
+ self.inner.len()
+ }
+
+ #[inline(always)]
+ pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq,
+ {
+ self.inner.remove(k)
+ }
+
+ #[inline(always)]
+ fn reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ #[cfg(not(all(feature = "std", feature = "rust_1_57")))]
+ {
+ self.inner.try_reserve(additional)
+ }
+
+ #[cfg(all(feature = "std", feature = "rust_1_57"))]
+ {
+ self.inner.try_reserve(additional).map_err(|_| {
+ crate::make_try_reserve_error(self.len(), additional,
+ core::mem::size_of::<K>() + core::mem::size_of::<V>(),
+ core::mem::align_of::<K>().max(core::mem::align_of::<V>()),
+ )
+ })
+ }
+ }
+}
+
+impl<K, V> IntoIterator for TryHashMap<K, V> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
+
+ #[inline(always)]
+ fn into_iter(self) -> Self::IntoIter {
+ self.inner.into_iter()
+ }
+}
+
+impl<K, V> TryClone for TryHashMap<K, V>
+where
+ K: Eq + Hash + TryClone,
+ V: TryClone,
+{
+ fn try_clone(&self) -> Result<Self, TryReserveError> {
+ let mut clone = Self::with_capacity(self.inner.len())?;
+
+ for (key, value) in self.inner.iter() {
+ clone.insert(key.try_clone()?, value.try_clone()?)?;
+ }
+
+ Ok(clone)
+ }
+}
+
+#[test]
+fn tryhashmap_oom() {
+ match TryHashMap::<char, char>::default().reserve(core::usize::MAX) {
+ Ok(_) => panic!("it should be OOM"),
+ _ => (),
+ }
+}
diff --git a/third_party/rust/fallible_collections/src/lib.rs b/third_party/rust/fallible_collections/src/lib.rs
new file mode 100644
index 0000000000..45dcd48d17
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/lib.rs
@@ -0,0 +1,91 @@
+//! Implement fallible collections that report allocation errors, much as
+//! described in [RFC 2116](https://github.com/rust-lang/rfcs/blob/master/text/2116-alloc-me-maybe.md).
+//! This was used in the turbofish OS hobby project to mitigate the
+//! lack of fallible allocation in rust.
+//!
+//! The `Try*` types in this module are thin wrappers around the stdlib types to add
+//! support for fallible allocation. The API differences from the stdlib types ensure
+//! that all operations which allocate return a `Result`. For the most part, this simply
+//! means adding a `Result` return value to functions which return nothing or a
+//! non-`Result` value. However, these types implement some traits whose API cannot
+//! communicate failure, but which do require allocation, so it is important that these
+//! wrapper types do not implement these traits.
+//!
+//! Specifically, these types must not implement any of the following traits:
+//! - Clone
+//! - Extend
+//! - From
+//! - FromIterator
+//!
+//! This list may not be exhaustive. Exercise caution when implementing
+//! any new traits to ensure they won't potentially allocate in a way that
+//! can't return a Result to indicate allocation failure.
+
+#![cfg_attr(not(test), no_std)]
+#![cfg_attr(all(feature = "unstable", not(feature = "rust_1_57")), feature(try_reserve))]
+#![cfg_attr(feature = "unstable", feature(min_specialization))]
+#![cfg_attr(feature = "unstable", feature(allocator_api))]
+#![cfg_attr(feature = "unstable", feature(dropck_eyepatch))]
+#![cfg_attr(feature = "unstable", feature(ptr_internals))]
+#![cfg_attr(feature = "unstable", feature(core_intrinsics))]
+#![cfg_attr(all(feature = "unstable", not(feature = "rust_1_57")), feature(maybe_uninit_ref))]
+#![cfg_attr(feature = "unstable", feature(maybe_uninit_slice))]
+#![cfg_attr(feature = "unstable", feature(maybe_uninit_extra))]
+#![cfg_attr(feature = "unstable", feature(maybe_uninit_uninit_array))]
+extern crate alloc;
+#[cfg(feature = "std")]
+extern crate std;
+
+pub mod boxed;
+pub use boxed::*;
+#[macro_use]
+pub mod vec;
+pub use vec::*;
+pub mod rc;
+pub use rc::*;
+pub mod arc;
+pub use arc::*;
+#[cfg(feature = "unstable")]
+pub mod btree;
+#[cfg(not(feature = "unstable"))]
+pub mod hashmap;
+#[cfg(not(feature = "unstable"))]
+pub use hashmap::*;
+#[macro_use]
+pub mod format;
+pub mod try_clone;
+
+#[cfg(all(feature = "unstable", not(feature = "rust_1_57")))]
+pub use alloc::collections::TryReserveError;
+#[cfg(not(all(feature = "unstable", not(feature = "rust_1_57"))))]
+pub use hashbrown::TryReserveError;
+
+#[cfg(feature = "std_io")]
+pub use vec::std_io::*;
+
+/// trait for trying to clone an elem, returning an error instead of
+/// panicking if allocation fails
+/// # Examples
+///
+/// ```
+/// use fallible_collections::TryClone;
+/// let mut vec = vec![42, 100];
+/// assert_eq!(vec.try_clone().unwrap(), vec)
+/// ```
+pub trait TryClone {
+    /// try clone method (`Self` must be `Sized` because of the `Result`
+    /// constraint)
+ fn try_clone(&self) -> Result<Self, TryReserveError>
+ where
+ Self: core::marker::Sized;
+}
+
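+// On Rust 1.57+ the std `TryReserveError` is opaque, so reconstruct a
+// best-effort hashbrown error: recompute the layout the failed reservation
+// would have requested, falling back to `CapacityOverflow`.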
+#[cfg(feature = "rust_1_57")]
+fn make_try_reserve_error(len: usize, additional: usize, elem_size: usize, align: usize) -> hashbrown::TryReserveError {
+ if let Some(size) = len.checked_add(additional).and_then(|l| l.checked_mul(elem_size)) {
+ if let Ok(layout) = alloc::alloc::Layout::from_size_align(size, align) {
+ return TryReserveError::AllocError { layout }
+ }
+ }
+ TryReserveError::CapacityOverflow
+}
diff --git a/third_party/rust/fallible_collections/src/rc.rs b/third_party/rust/fallible_collections/src/rc.rs
new file mode 100644
index 0000000000..4fc16dc2af
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/rc.rs
@@ -0,0 +1,35 @@
+//! Implement a Fallible Rc
+use super::FallibleBox;
+use crate::TryReserveError;
+use alloc::boxed::Box;
+use alloc::rc::Rc;
+/// trait to implement Fallible Rc
+pub trait FallibleRc<T> {
+    /// try creating a new `Rc`, returning a `Result<Rc<T>,
+    /// TryReserveError>` that is an error if allocation failed
+ fn try_new(t: T) -> Result<Self, TryReserveError>
+ where
+ Self: Sized;
+}
+
+impl<T> FallibleRc<T> for Rc<T> {
+ fn try_new(t: T) -> Result<Self, TryReserveError> {
+ let b = <Box<T> as FallibleBox<T>>::try_new(t)?;
+ Ok(Rc::from(b))
+ }
+}
+
+#[cfg(test)]
+mod test {
+ #[test]
+ fn fallible_rc() {
+ use std::rc::Rc;
+
+ let mut x = Rc::new(3);
+ *Rc::get_mut(&mut x).unwrap() = 4;
+ assert_eq!(*x, 4);
+
+ let _y = Rc::clone(&x);
+ assert!(Rc::get_mut(&mut x).is_none());
+ }
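+
+    // A sketch exercising the fallible constructor itself; assumes a
+    // small allocation succeeds under the test allocator.
+    #[test]
+    fn fallible_rc_try_new() {
+        use super::FallibleRc;
+        use std::rc::Rc;
+
+        let x = <Rc<u32> as FallibleRc<u32>>::try_new(3).unwrap();
+        assert_eq!(*x, 3);
+    }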
+}
diff --git a/third_party/rust/fallible_collections/src/try_clone.rs b/third_party/rust/fallible_collections/src/try_clone.rs
new file mode 100644
index 0000000000..a8ff0442b4
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/try_clone.rs
@@ -0,0 +1,39 @@
+//! This module implements `TryClone` for primitive rust types
+
+use super::TryClone;
+use crate::TryReserveError;
+
+macro_rules! impl_try_clone {
+ ($($e: ty),*) => {
+ $(impl TryClone for $e {
+ #[inline(always)]
+ fn try_clone(&self) -> Result<Self, TryReserveError>
+ where
+ Self: core::marker::Sized,
+ {
+ Ok(*self)
+ }
+ }
+ )*
+ }
+}
+
+impl_try_clone!(u8, u16, u32, u64, i8, i16, i32, i64, usize, isize, bool);
+
+impl<T: TryClone> TryClone for Option<T> {
+ #[inline]
+ fn try_clone(&self) -> Result<Self, TryReserveError> {
+ Ok(match self {
+ Some(t) => Some(t.try_clone()?),
+ None => None,
+ })
+ }
+}
+// impl<T: Copy> TryClone for T {
+// fn try_clone(&self) -> Result<Self, TryReserveError>
+// where
+// Self: core::marker::Sized,
+// {
+// Ok(*self)
+// }
+// }
diff --git a/third_party/rust/fallible_collections/src/vec.rs b/third_party/rust/fallible_collections/src/vec.rs
new file mode 100644
index 0000000000..6197b5b5d6
--- /dev/null
+++ b/third_party/rust/fallible_collections/src/vec.rs
@@ -0,0 +1,968 @@
+//! Implement Fallible Vec
+use super::TryClone;
+use crate::TryReserveError;
+#[allow(unused_imports)]
+use alloc::alloc::{alloc, realloc, Layout};
+use alloc::vec::Vec;
+use core::convert::TryInto as _;
+
+#[cfg(feature = "unstable")]
+#[macro_export]
+/// macro trying to create a vec, returning a
+/// `Result<Vec<T>, TryReserveError>`
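+///
+/// A short sketch (requires the `unstable` feature, so it is not compiled
+/// as a doctest):
+///
+/// ```ignore
+/// let v: Vec<u8> = fallible_collections::try_vec![0u8; 16].unwrap();
+/// assert_eq!(v.len(), 16);
+/// ```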
+macro_rules! try_vec {
+ ($elem:expr; $n:expr) => (
+ $crate::vec::try_from_elem($elem, $n)
+ );
+ ($($x:expr),*) => (
+ match <alloc::boxed::Box<_> as $crate::boxed::FallibleBox<_>>::try_new([$($x),*]) {
+ Err(e) => Err(e),
+ Ok(b) => Ok(<[_]>::into_vec(b)),
+ }
+ );
+ ($($x:expr,)*) => ($crate::try_vec![$($x),*])
+}
+
+/// trait implementing all fallible methods on vec
+pub trait FallibleVec<T> {
+ /// see reserve
+ fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError>;
+ /// see push
+ fn try_push(&mut self, elem: T) -> Result<(), TryReserveError>;
+ /// try push and give back ownership in case of error
+ fn try_push_give_back(&mut self, elem: T) -> Result<(), (T, TryReserveError)>;
+    /// see with_capacity (`Self` must be `Sized` because of the `Result` constraint)
+ fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError>
+ where
+ Self: core::marker::Sized;
+ /// see insert
+ fn try_insert(&mut self, index: usize, element: T) -> Result<(), (T, TryReserveError)>;
+ /// see append
+ fn try_append(&mut self, other: &mut Self) -> Result<(), TryReserveError>;
+    /// see resize; only works when `value` implements Copy, otherwise look at try_resize_no_copy
+ fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError>
+ where
+ T: Copy + Clone;
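+    /// see resize_with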
+ fn try_resize_with<F>(&mut self, new_len: usize, f: F) -> Result<(), TryReserveError>
+ where
+ F: FnMut() -> T;
+    /// resize the vec by trying to clone the value repeatedly
+ fn try_resize_no_copy(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError>
+ where
+ T: TryClone;
+    /// see extend_from_slice; only works when the values implement Copy, otherwise look at try_extend_from_slice_no_copy
+ fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), TryReserveError>
+ where
+ T: Copy + Clone;
+    /// extend the vec by trying to clone the values in `other`
+ fn try_extend_from_slice_no_copy(&mut self, other: &[T]) -> Result<(), TryReserveError>
+ where
+ T: TryClone;
+}
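+
+// A minimal usage sketch (not part of the crate itself): the trait is
+// implemented for `alloc::vec::Vec` below, so the fallible methods become
+// available on a plain `Vec` once `FallibleVec` is in scope:
+//
+//     use fallible_collections::FallibleVec;
+//
+//     let mut v: Vec<u8> = Vec::new();
+//     v.try_push(1).unwrap();
+//     v.try_extend_from_slice(&[2, 3]).unwrap();
+//     assert_eq!(v, [1, 2, 3]);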
+
+/// TryVec is a thin wrapper around alloc::vec::Vec to provide support for
+/// fallible allocation.
+///
+/// See the crate documentation for more.
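+///
+/// A minimal usage sketch (assumes the crate's default features):
+///
+/// ```
+/// use fallible_collections::TryVec;
+///
+/// let mut v: TryVec<u8> = TryVec::new();
+/// v.push(1)?;
+/// v.extend_from_slice(&[2, 3])?;
+/// assert_eq!(v.as_slice(), &[1, 2, 3]);
+/// # Ok::<(), fallible_collections::TryReserveError>(())
+/// ```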
+#[derive(PartialEq)]
+pub struct TryVec<T> {
+ inner: Vec<T>,
+}
+
+impl<T> Default for TryVec<T> {
+ #[inline(always)]
+ fn default() -> Self {
+ Self {
+ inner: Default::default(),
+ }
+ }
+}
+
+impl<T: core::fmt::Debug> core::fmt::Debug for TryVec<T> {
+ #[inline]
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl<T> TryVec<T> {
+ #[inline(always)]
+ pub fn new() -> Self {
+ Self { inner: Vec::new() }
+ }
+
+ #[inline]
+ pub fn with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
+ Ok(Self {
+ inner: FallibleVec::try_with_capacity(capacity)?,
+ })
+ }
+
+ #[inline(always)]
+ pub fn append(&mut self, other: &mut Self) -> Result<(), TryReserveError> {
+ FallibleVec::try_append(&mut self.inner, &mut other.inner)
+ }
+
+ #[inline(always)]
+ pub fn as_mut_slice(&mut self) -> &mut [T] {
+ self
+ }
+
+ #[inline(always)]
+ pub fn as_slice(&self) -> &[T] {
+ self
+ }
+
+ #[inline(always)]
+ pub fn clear(&mut self) {
+ self.inner.clear()
+ }
+
+ #[cfg(test)]
+ pub fn into_inner(self) -> Vec<T> {
+ self.inner
+ }
+
+ #[inline(always)]
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+
+ #[inline(always)]
+ pub fn iter_mut(&mut self) -> IterMut<T> {
+ IterMut {
+ inner: self.inner.iter_mut(),
+ }
+ }
+
+ #[inline(always)]
+ pub fn iter(&self) -> Iter<T> {
+ Iter {
+ inner: self.inner.iter(),
+ }
+ }
+
+ #[inline(always)]
+ pub fn pop(&mut self) -> Option<T> {
+ self.inner.pop()
+ }
+
+ #[inline(always)]
+ pub fn push(&mut self, value: T) -> Result<(), TryReserveError> {
+ FallibleVec::try_push(&mut self.inner, value)
+ }
+
+ #[inline(always)]
+ pub fn reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ FallibleVec::try_reserve(&mut self.inner, additional)
+ }
+
+ #[inline(always)]
+ pub fn resize_with<F>(&mut self, new_len: usize, f: F) -> Result<(), TryReserveError>
+ where
+ F: FnMut() -> T,
+ {
+ FallibleVec::try_resize_with(&mut self.inner, new_len, f)
+ }
+}
+
+impl<T: TryClone> TryClone for TryVec<T> {
+ #[inline]
+ fn try_clone(&self) -> Result<Self, TryReserveError> {
+ self.as_slice().try_into()
+ }
+}
+
+impl<T: TryClone> TryVec<TryVec<T>> {
+ pub fn concat(&self) -> Result<TryVec<T>, TryReserveError> {
+ let size = self.iter().map(|v| v.inner.len()).sum();
+ let mut result = TryVec::with_capacity(size)?;
+ for v in self.iter() {
+ result.inner.try_extend_from_slice_no_copy(&v.inner)?;
+ }
+ Ok(result)
+ }
+}
+
+impl<T: TryClone> TryVec<T> {
+ #[inline(always)]
+ pub fn extend_from_slice(&mut self, other: &[T]) -> Result<(), TryReserveError> {
+ self.inner.try_extend_from_slice_no_copy(other)
+ }
+}
+
+impl<T> IntoIterator for TryVec<T> {
+ type Item = T;
+ type IntoIter = alloc::vec::IntoIter<T>;
+
+ #[inline(always)]
+ fn into_iter(self) -> Self::IntoIter {
+ self.inner.into_iter()
+ }
+}
+
+impl<'a, T> IntoIterator for &'a TryVec<T> {
+ type Item = &'a T;
+ type IntoIter = alloc::slice::Iter<'a, T>;
+
+ #[inline(always)]
+ fn into_iter(self) -> Self::IntoIter {
+ self.inner.iter()
+ }
+}
+
+#[cfg(feature = "std_io")]
+pub mod std_io {
+ use super::*;
+ use std::io::{self, Read, Take, Write};
+
+ pub trait TryRead {
+ fn try_read_to_end(&mut self, buf: &mut TryVec<u8>) -> io::Result<usize>;
+
+ #[inline]
+ fn read_into_try_vec(&mut self) -> io::Result<TryVec<u8>> {
+ let mut buf = TryVec::new();
+ self.try_read_to_end(&mut buf)?;
+ Ok(buf)
+ }
+ }
+
+ impl<T: Read> TryRead for Take<T> {
+ /// This function reserves the upper limit of what `src` can generate before
+ /// reading all bytes until EOF in this source, placing them into `buf`. If the
+ /// allocation is unsuccessful, or reading from the source generates an error
+ /// before reaching EOF, this will return an error. Otherwise, it will return
+ /// the number of bytes read.
+ ///
+ /// Since `Take::limit()` may return a value greater than the number of bytes
+ /// which can be read from the source, it's possible this function may fail
+ /// in the allocation phase even though allocating the number of bytes available
+ /// to read would have succeeded. In general, it is assumed that the callers
+ /// have accurate knowledge of the number of bytes of interest and have created
+ /// `src` accordingly.
+ #[inline]
+ fn try_read_to_end(&mut self, buf: &mut TryVec<u8>) -> io::Result<usize> {
+ try_read_up_to(self, self.limit(), buf)
+ }
+ }
+
+ /// Read up to `limit` bytes from `src`, placing them into `buf` and returning the
+ /// number of bytes read. Space for `limit` additional bytes is reserved in `buf`, so
+ /// this function will return an error if the allocation fails.
+ pub fn try_read_up_to<R: Read>(
+ src: &mut R,
+ limit: u64,
+ buf: &mut TryVec<u8>,
+ ) -> io::Result<usize> {
+ let additional = limit
+ .try_into()
+ .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+ buf.reserve(additional)
+ .map_err(|_| io::Error::new(io::ErrorKind::Other, "reserve allocation failed"))?;
+ let bytes_read = src.take(limit).read_to_end(&mut buf.inner)?;
+ Ok(bytes_read)
+ }
+
+ impl Write for TryVec<u8> {
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice(buf)
+ .map_err(|_| io::Error::new(io::ErrorKind::Other, "extend_from_slice failed"))?;
+ Ok(buf.len())
+ }
+
+ #[inline(always)]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+ }
+
+ #[cfg(test)]
+ mod tests {
+ use super::*;
+
+ #[test]
+ fn try_read_to_end() {
+ let mut src = b"1234567890".take(5);
+ let mut buf = TryVec::new();
+ src.try_read_to_end(&mut buf).unwrap();
+ assert_eq!(buf.len(), 5);
+ assert_eq!(buf, b"12345".as_ref());
+ }
+
+ #[test]
+ fn read_into_try_vec() {
+ let mut src = b"1234567890".take(5);
+ let buf = src.read_into_try_vec().unwrap();
+ assert_eq!(buf.len(), 5);
+ assert_eq!(buf, b"12345".as_ref());
+ }
+
+ #[test]
+ fn read_into_try_vec_oom() {
+ let mut src = b"1234567890".take(core::usize::MAX.try_into().expect("usize < u64"));
+ assert!(src.read_into_try_vec().is_err());
+ }
+
+ #[test]
+ fn try_read_up_to() {
+ let src = b"1234567890";
+ let mut buf = TryVec::new();
+ super::try_read_up_to(&mut src.as_ref(), 5, &mut buf).unwrap();
+ assert_eq!(buf.len(), 5);
+ assert_eq!(buf, b"12345".as_ref());
+ }
+
+ #[test]
+ fn try_read_up_to_oom() {
+ let src = b"1234567890";
+ let mut buf = TryVec::new();
+ let limit = core::usize::MAX.try_into().expect("usize < u64");
+ let res = super::try_read_up_to(&mut src.as_ref(), limit, &mut buf);
+ assert!(res.is_err());
+ }
+ }
+}
+
+impl<T: PartialEq> PartialEq<Vec<T>> for TryVec<T> {
+ #[inline(always)]
+ fn eq(&self, other: &Vec<T>) -> bool {
+ self.inner.eq(other)
+ }
+}
+
+impl<'a, T: PartialEq> PartialEq<&'a [T]> for TryVec<T> {
+ #[inline(always)]
+ fn eq(&self, other: &&[T]) -> bool {
+ self.inner.eq(other)
+ }
+}
+
+impl PartialEq<&str> for TryVec<u8> {
+ #[inline]
+ fn eq(&self, other: &&str) -> bool {
+ self.as_slice() == other.as_bytes()
+ }
+}
+
+impl core::convert::AsRef<[u8]> for TryVec<u8> {
+ #[inline(always)]
+ fn as_ref(&self) -> &[u8] {
+ self.inner.as_ref()
+ }
+}
+
+impl<T> core::convert::From<Vec<T>> for TryVec<T> {
+ #[inline(always)]
+ fn from(value: Vec<T>) -> Self {
+ Self { inner: value }
+ }
+}
+
+impl<T: TryClone> core::convert::TryFrom<&[T]> for TryVec<T> {
+ type Error = TryReserveError;
+
+ #[inline]
+ fn try_from(value: &[T]) -> Result<Self, Self::Error> {
+ let mut v = Self::new();
+ v.inner.try_extend_from_slice_no_copy(value)?;
+ Ok(v)
+ }
+}
+
+impl core::convert::TryFrom<&str> for TryVec<u8> {
+ type Error = TryReserveError;
+
+ #[inline]
+ fn try_from(value: &str) -> Result<Self, Self::Error> {
+ let mut v = Self::new();
+ v.extend_from_slice(value.as_bytes())?;
+ Ok(v)
+ }
+}
+
+impl<T> core::ops::Deref for TryVec<T> {
+ type Target = [T];
+
+ #[inline(always)]
+ fn deref(&self) -> &[T] {
+ self.inner.deref()
+ }
+}
+
+impl<T> core::ops::DerefMut for TryVec<T> {
+ fn deref_mut(&mut self) -> &mut [T] {
+ self.inner.deref_mut()
+ }
+}
+
+pub struct Iter<'a, T> {
+ inner: alloc::slice::Iter<'a, T>,
+}
+
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ #[inline(always)]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+pub struct IterMut<'a, T> {
+ inner: alloc::slice::IterMut<'a, T>,
+}
+
+impl<'a, T> Iterator for IterMut<'a, T> {
+ type Item = &'a mut T;
+
+ #[inline(always)]
+ fn next(&mut self) -> Option<Self::Item> {
+ self.inner.next()
+ }
+
+ #[inline(always)]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+/// Grow capacity exponentially
+#[cold]
+fn vec_try_reserve_for_growth<T>(v: &mut Vec<T>, additional: usize) -> Result<(), TryReserveError> {
+ // saturating, because can't use CapacityOverflow here if rust_1_57 flag is enabled
+ FallibleVec::try_reserve(v, additional.max(v.capacity().saturating_mul(2) - v.len()))
+}
+
+fn needs_to_grow<T>(v: &Vec<T>, len: usize) -> bool {
+ v.len().checked_add(len).map_or(true, |needed| needed > v.capacity())
+}
+
+#[cfg(not(any(feature = "unstable", feature = "rust_1_57")))]
+fn vec_try_reserve<T>(v: &mut Vec<T>, additional: usize) -> Result<(), TryReserveError> {
+ let available = v.capacity().checked_sub(v.len()).expect("capacity >= len");
+ if additional > available {
+ let increase = additional
+ .checked_sub(available)
+ .expect("additional > available");
+ let new_cap = v
+ .capacity()
+ .checked_add(increase)
+ .ok_or(TryReserveError::CapacityOverflow)?;
+ vec_try_extend(v, new_cap)?;
+ debug_assert!(v.capacity() == new_cap);
+ }
+
+ Ok(())
+}
+
+#[cfg(not(any(feature = "unstable", feature = "rust_1_57")))]
+fn vec_try_extend<T>(v: &mut Vec<T>, new_cap: usize) -> Result<(), TryReserveError> {
+ let old_len = v.len();
+ let old_cap: usize = v.capacity();
+
+ if old_cap >= new_cap {
+ return Ok(());
+ }
+
+ let elem_size = core::mem::size_of::<T>();
+ let new_alloc_size = new_cap
+ .checked_mul(elem_size)
+ .filter(|size| *size <= isize::MAX as usize)
+ .ok_or(TryReserveError::CapacityOverflow)?;
+
+ // required for alloc safety
+ // See https://doc.rust-lang.org/stable/std/alloc/trait.GlobalAlloc.html#safety-1
+ // Should be unreachable given prior `old_cap >= new_cap` check.
+ assert!(new_alloc_size > 0);
+
+ let align = core::mem::align_of::<T>();
+
+ let (new_ptr, layout) = {
+ if old_cap == 0 {
+ let layout = Layout::from_size_align(new_alloc_size, align).expect("Invalid layout");
+ let new_ptr = unsafe { alloc(layout) };
+ (new_ptr, layout)
+ } else {
+ let old_alloc_size = old_cap
+ .checked_mul(elem_size)
+ .ok_or(TryReserveError::CapacityOverflow)?;
+ let layout = Layout::from_size_align(old_alloc_size, align).expect("Invalid layout");
+ let new_ptr = unsafe { realloc(v.as_mut_ptr() as *mut u8, layout, new_alloc_size) };
+ (new_ptr, layout)
+ }
+ };
+
+ if new_ptr.is_null() {
+ return Err(TryReserveError::AllocError { layout });
+ }
+
+ let new_vec = unsafe { Vec::from_raw_parts(new_ptr.cast(), old_len, new_cap) };
+
+ core::mem::forget(core::mem::replace(v, new_vec));
+ Ok(())
+}
+
+impl<T> FallibleVec<T> for Vec<T> {
+ #[inline(always)]
+ fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ #[cfg(all(feature = "unstable", not(feature = "rust_1_57")))]
+ {
+ self.try_reserve(additional)
+ }
+
+ #[cfg(not(feature = "rust_1_57"))]
+ {
+ vec_try_reserve(self, additional)
+ }
+
+ #[cfg(feature = "rust_1_57")]
+ {
+ // TryReserveError is an opaque type in 1.57
+ self.try_reserve(additional).map_err(|_| {
+ crate::make_try_reserve_error(self.len(), additional, core::mem::size_of::<T>(), core::mem::align_of::<T>())
+ })
+ }
+ }
+
+ #[inline]
+ fn try_push(&mut self, elem: T) -> Result<(), TryReserveError> {
+ if self.len() == self.capacity() {
+ vec_try_reserve_for_growth(self, 1)?;
+ }
+ Ok(self.push(elem))
+ }
+
+ #[inline]
+ fn try_push_give_back(&mut self, elem: T) -> Result<(), (T, TryReserveError)> {
+ if self.len() == self.capacity() {
+ if let Err(e) = vec_try_reserve_for_growth(self, 1) {
+ return Err((elem, e));
+ }
+ }
+ Ok(self.push(elem))
+ }
+
+ #[inline]
+ fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError>
+ where
+ Self: core::marker::Sized,
+ {
+ let mut n = Self::new();
+ FallibleVec::try_reserve(&mut n, capacity)?;
+ Ok(n)
+ }
+
+ #[inline]
+ fn try_insert(&mut self, index: usize, element: T) -> Result<(), (T, TryReserveError)> {
+ if self.len() == self.capacity() {
+ if let Err(e) = vec_try_reserve_for_growth(self, 1) {
+ return Err((element, e));
+ }
+ }
+ Ok(self.insert(index, element))
+ }
+ #[inline]
+ fn try_append(&mut self, other: &mut Self) -> Result<(), TryReserveError> {
+ FallibleVec::try_reserve(self, other.len())?;
+ Ok(self.append(other))
+ }
+ fn try_resize(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError>
+ where
+ T: Copy + Clone,
+ {
+ let len = self.len();
+ if new_len > len {
+ FallibleVec::try_reserve(self, new_len - len)?;
+ }
+ Ok(self.resize(new_len, value))
+ }
+ fn try_resize_with<F>(&mut self, new_len: usize, f: F) -> Result<(), TryReserveError>
+ where
+ F: FnMut() -> T,
+ {
+ let len = self.len();
+ if new_len > len {
+ FallibleVec::try_reserve(self, new_len - len)?;
+ }
+ Ok(self.resize_with(new_len, f))
+ }
+ fn try_resize_no_copy(&mut self, new_len: usize, value: T) -> Result<(), TryReserveError>
+ where
+ T: TryClone,
+ {
+ let len = self.len();
+
+ if new_len > len {
+ self.try_extend_with(new_len - len, TryExtendElement(value))
+ } else {
+ Ok(self.truncate(new_len))
+ }
+ }
+ #[inline]
+ fn try_extend_from_slice(&mut self, other: &[T]) -> Result<(), TryReserveError>
+ where
+ T: Clone,
+ {
+ if needs_to_grow(self, other.len()) {
+ vec_try_reserve_for_growth(self, other.len())?;
+ }
+ Ok(self.extend_from_slice(other))
+ }
+ fn try_extend_from_slice_no_copy(&mut self, other: &[T]) -> Result<(), TryReserveError>
+ where
+ T: TryClone,
+ {
+ if needs_to_grow(self, other.len()) {
+ vec_try_reserve_for_growth(self, other.len())?;
+ }
+ let mut len = self.len();
+ let mut iterator = other.iter();
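+        // Clone and write the elements one at a time, updating `len` after
+        // each write so an error from `try_clone` leaves the vec valid.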
+ while let Some(element) = iterator.next() {
+ unsafe {
+ core::ptr::write(self.get_unchecked_mut(len), element.try_clone()?);
+ // NB can't overflow since we would have had to alloc the address space
+ len += 1;
+ self.set_len(len);
+ }
+ }
+ Ok(())
+ }
+}
+
+trait ExtendWith<T> {
+ fn next(&mut self) -> Result<T, TryReserveError>;
+ fn last(self) -> T;
+}
+
+struct TryExtendElement<T: TryClone>(T);
+impl<T: TryClone> ExtendWith<T> for TryExtendElement<T> {
+ #[inline(always)]
+ fn next(&mut self) -> Result<T, TryReserveError> {
+ self.0.try_clone()
+ }
+ #[inline(always)]
+ fn last(self) -> T {
+ self.0
+ }
+}
+
+trait TryExtend<T> {
+ fn try_extend_with<E: ExtendWith<T>>(
+ &mut self,
+ n: usize,
+ value: E,
+ ) -> Result<(), TryReserveError>;
+}
+
+impl<T> TryExtend<T> for Vec<T> {
+ /// Extend the vector by `n` values, using the given generator.
+ fn try_extend_with<E: ExtendWith<T>>(
+ &mut self,
+ n: usize,
+ mut value: E,
+ ) -> Result<(), TryReserveError> {
+ if needs_to_grow(self, n) {
+ vec_try_reserve_for_growth(self, n)?;
+ }
+
+ unsafe {
+ let mut ptr = self.as_mut_ptr().add(self.len());
+
+ let mut local_len = self.len();
+ // Write all elements except the last one
+ for _ in 1..n {
+ core::ptr::write(ptr, value.next()?);
+ ptr = ptr.offset(1);
+                // Increment the length in every step in case next() errors or panics
+ local_len += 1;
+ self.set_len(local_len);
+ }
+
+ if n > 0 {
+ // We can write the last element directly without cloning needlessly
+ core::ptr::write(ptr, value.last());
+ local_len += 1;
+ self.set_len(local_len);
+ }
+
+            // len was already updated after each write above
+ }
+ Ok(())
+ }
+}
+
+trait Truncate {
+ fn truncate(&mut self, len: usize);
+}
+
+impl<T> Truncate for Vec<T> {
+ fn truncate(&mut self, len: usize) {
+ let current_len = self.len();
+ unsafe {
+ let mut ptr = self.as_mut_ptr().add(current_len);
+ // Set the final length at the end, keeping in mind that
+ // dropping an element might panic. Works around a missed
+ // optimization, as seen in the following issue:
+ // https://github.com/rust-lang/rust/issues/51802
+ let mut local_len = self.len();
+
+ // drop any extra elements
+ for _ in len..current_len {
+ ptr = ptr.offset(-1);
+ core::ptr::drop_in_place(ptr);
+ local_len -= 1;
+ self.set_len(local_len);
+ }
+ }
+ }
+}
+
+/// try creating a vec from an `elem` cloned `n` times, see std::from_elem
+#[cfg(feature = "unstable")]
+pub fn try_from_elem<T: TryClone>(elem: T, n: usize) -> Result<Vec<T>, TryReserveError> {
+ <T as SpecFromElem>::try_from_elem(elem, n)
+}
+
+// Specialization trait used for Vec::from_elem
+#[cfg(feature = "unstable")]
+trait SpecFromElem: Sized {
+ fn try_from_elem(elem: Self, n: usize) -> Result<Vec<Self>, TryReserveError>;
+}
+
+#[cfg(feature = "unstable")]
+impl<T: TryClone> SpecFromElem for T {
+ default fn try_from_elem(elem: Self, n: usize) -> Result<Vec<T>, TryReserveError> {
+ let mut v = Vec::new();
+ v.try_resize_no_copy(n, elem)?;
+ Ok(v)
+ }
+}
+
+#[cfg(feature = "unstable")]
+impl SpecFromElem for u8 {
+ #[inline]
+ fn try_from_elem(elem: u8, n: usize) -> Result<Vec<u8>, TryReserveError> {
+ unsafe {
+ let mut v: Vec<u8> = FallibleVec::try_with_capacity(n)?;
+ core::ptr::write_bytes(v.as_mut_ptr(), elem, n);
+ v.set_len(n);
+ Ok(v)
+ }
+ }
+}
+
+impl<T: TryClone> TryClone for Vec<T> {
+ #[inline]
+ fn try_clone(&self) -> Result<Self, TryReserveError>
+ where
+ Self: core::marker::Sized,
+ {
+ let mut v = Vec::new();
+ v.try_extend_from_slice_no_copy(self)?;
+ Ok(v)
+ }
+}
+
+pub trait TryFromIterator<I>: Sized {
+ fn try_from_iterator<T: IntoIterator<Item = I>>(iterator: T) -> Result<Self, TryReserveError>;
+}
+
+impl<I> TryFromIterator<I> for Vec<I> {
+ fn try_from_iterator<T: IntoIterator<Item = I>>(iterator: T) -> Result<Self, TryReserveError>
+ where
+ T: IntoIterator<Item = I>,
+ {
+ let mut new = Self::new();
+ for i in iterator {
+ new.try_push(i)?;
+ }
+ Ok(new)
+ }
+}
+
+pub trait TryCollect<I> {
+ fn try_collect<C: TryFromIterator<I>>(self) -> Result<C, TryReserveError>;
+}
+
+impl<I, T> TryCollect<I> for T
+where
+ T: IntoIterator<Item = I>,
+{
+ #[inline(always)]
+ fn try_collect<C: TryFromIterator<I>>(self) -> Result<C, TryReserveError> {
+ C::try_from_iterator(self)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ #[cfg(feature = "unstable")]
+ fn vec() {
+ // let v: Vec<u8> = from_elem(1, 10);
+ let v: Vec<Vec<u8>> = try_vec![try_vec![42; 10].unwrap(); 100].unwrap();
+ println!("{:?}", v);
+ let v2 = try_vec![0, 1, 2];
+ println!("{:?}", v2);
+ assert_eq!(2 + 2, 4);
+ }
+
+ #[test]
+ fn try_clone_vec() {
+ // let v: Vec<u8> = from_elem(1, 10);
+ let v = vec![42; 100];
+ assert_eq!(v.try_clone().unwrap(), v);
+ }
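+
+    // A sketch exercising TryCollect/TryFromIterator on a plain iterator;
+    // assumes the small allocation succeeds.
+    #[test]
+    fn try_collect_vec() {
+        let v: Vec<u32> = (0u32..4).try_collect().unwrap();
+        assert_eq!(v, vec![0, 1, 2, 3]);
+    }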
+
+ #[test]
+ fn try_clone_oom() {
+ let layout = Layout::new::<u8>();
+ let v =
+ unsafe { Vec::<u8>::from_raw_parts(alloc(layout), core::isize::MAX as usize, core::isize::MAX as usize) };
+ assert!(v.try_clone().is_err());
+ }
+
+ #[test]
+ fn tryvec_try_clone_oom() {
+ let layout = Layout::new::<u8>();
+ let inner =
+ unsafe { Vec::<u8>::from_raw_parts(alloc(layout), core::isize::MAX as usize, core::isize::MAX as usize) };
+ let tv = TryVec { inner };
+ assert!(tv.try_clone().is_err());
+ }
+
+ // #[test]
+ // fn try_out_of_mem() {
+ // let v = try_vec![42_u8; 1000000000];
+ // assert_eq!(v.try_clone().unwrap(), v);
+ // }
+
+ #[test]
+ fn oom() {
+ let mut vec: Vec<char> = Vec::new();
+ match FallibleVec::try_reserve(&mut vec, core::usize::MAX / std::mem::size_of::<char>()) {
+ Ok(_) => panic!("it should be OOM"),
+ _ => (),
+ }
+ match FallibleVec::try_reserve(&mut vec, core::usize::MAX) {
+ Ok(_) => panic!("it should be OOM"),
+ _ => (),
+ }
+ }
+
+ #[test]
+ fn tryvec_oom() {
+ let mut vec: TryVec<char> = TryVec::new();
+ match vec.reserve(core::usize::MAX / std::mem::size_of::<char>()) {
+ Ok(_) => panic!("it should be OOM"),
+ _ => (),
+ }
+ match vec.reserve(core::usize::MAX) {
+ Ok(_) => panic!("it should be OOM"),
+ _ => (),
+ }
+ }
+
+ #[test]
+ fn try_reserve() {
+ let mut vec: Vec<_> = vec![1];
+ let additional_room = vec.capacity() - vec.len();
+ let additional = additional_room + 1;
+ let old_cap = vec.capacity();
+ FallibleVec::try_reserve(&mut vec, additional).unwrap();
+ assert!(vec.capacity() > old_cap);
+ }
+
+ #[test]
+ fn tryvec_reserve() {
+ let mut vec: TryVec<_> = vec![1].into();
+ let old_cap = vec.inner.capacity();
+ let new_cap = old_cap + 1;
+ vec.reserve(new_cap).unwrap();
+ assert!(vec.inner.capacity() >= new_cap);
+ }
+
+ #[test]
+ fn try_reserve_idempotent() {
+ let mut vec: Vec<_> = vec![1];
+ let additional_room = vec.capacity() - vec.len();
+ let additional = additional_room + 1;
+ FallibleVec::try_reserve(&mut vec, additional).unwrap();
+ let cap_after_reserve = vec.capacity();
+ FallibleVec::try_reserve(&mut vec, additional).unwrap();
+ assert_eq!(vec.capacity(), cap_after_reserve);
+ }
+
+ #[test]
+ fn tryvec_reserve_idempotent() {
+ let mut vec: TryVec<_> = vec![1].into();
+ let old_cap = vec.inner.capacity();
+ let new_cap = old_cap + 1;
+ vec.reserve(new_cap).unwrap();
+ let cap_after_reserve = vec.inner.capacity();
+ vec.reserve(new_cap).unwrap();
+ assert_eq!(cap_after_reserve, vec.inner.capacity());
+ }
+
+ #[test]
+ fn capacity_overflow() {
+ let mut vec: Vec<_> = vec![1];
+ match FallibleVec::try_reserve(&mut vec, core::usize::MAX) {
+ Ok(_) => panic!("capacity calculation should overflow"),
+ _ => (),
+ }
+ }
+
+ #[test]
+ fn tryvec_capacity_overflow() {
+ let mut vec: TryVec<_> = vec![1].into();
+ match vec.reserve(core::usize::MAX) {
+ Ok(_) => panic!("capacity calculation should overflow"),
+ _ => (),
+ }
+ }
+
+ #[test]
+ fn extend_from_slice() {
+ let mut vec: Vec<u8> = b"foo".as_ref().into();
+ vec.shrink_to_fit();
+ vec.reserve(5);
+ assert_eq!(8, vec.capacity());
+ vec.try_extend_from_slice(b"bar").unwrap();
+ assert_eq!(vec, b"foobar".as_ref());
+ vec.try_extend_from_slice(b"1").unwrap();
+ assert_eq!(vec, b"foobar1".as_ref());
+ assert_eq!(8, vec.capacity());
+ vec.try_extend_from_slice(b"11").unwrap();
+ assert_eq!(16, vec.capacity());
+ }
+
+ #[test]
+ fn tryvec_extend_from_slice() {
+ let mut vec: TryVec<u8> = b"foo".as_ref().try_into().unwrap();
+ vec.extend_from_slice(b"bar").unwrap();
+ assert_eq!(vec, b"foobar".as_ref());
+ }
+
+ #[test]
+ #[cfg(not(any(feature = "unstable", feature = "rust_1_57")))]
+ fn try_extend_zst() {
+ let mut vec: Vec<()> = Vec::new();
+ assert_eq!(vec.capacity(), core::usize::MAX);
+ assert!(vec_try_extend(&mut vec, 10).is_ok());
+ assert!(vec_try_extend(&mut vec, core::usize::MAX).is_ok());
+ }
+
+ #[test]
+ fn try_reserve_zst() {
+ let mut vec: Vec<()> = Vec::new();
+ assert!(FallibleVec::try_reserve(&mut vec, core::usize::MAX).is_ok());
+ }
+}