path: root/vendor/im-rc/src
Diffstat (limited to 'vendor/im-rc/src')
-rw-r--r--  vendor/im-rc/src/arbitrary.rs                      98
-rw-r--r--  vendor/im-rc/src/config.rs                         20
-rw-r--r--  vendor/im-rc/src/fakepool.rs                      208
-rw-r--r--  vendor/im-rc/src/hash/map.rs                     2379
-rw-r--r--  vendor/im-rc/src/hash/mod.rs                        8
-rw-r--r--  vendor/im-rc/src/hash/set.rs                     1134
-rw-r--r--  vendor/im-rc/src/iter.rs                           42
-rw-r--r--  vendor/im-rc/src/lib.rs                           507
-rw-r--r--  vendor/im-rc/src/nodes/btree.rs                  1368
-rw-r--r--  vendor/im-rc/src/nodes/hamt.rs                    726
-rw-r--r--  vendor/im-rc/src/nodes/mod.rs                      16
-rw-r--r--  vendor/im-rc/src/nodes/rrb.rs                    1101
-rw-r--r--  vendor/im-rc/src/ord/map.rs                      2649
-rw-r--r--  vendor/im-rc/src/ord/mod.rs                         8
-rw-r--r--  vendor/im-rc/src/ord/set.rs                      1243
-rw-r--r--  vendor/im-rc/src/ord/test-fixtures/issue_124.txt 3492
-rw-r--r--  vendor/im-rc/src/proptest.rs                      164
-rw-r--r--  vendor/im-rc/src/quickcheck.rs                     43
-rw-r--r--  vendor/im-rc/src/ser.rs                           293
-rw-r--r--  vendor/im-rc/src/sort.rs                          203
-rw-r--r--  vendor/im-rc/src/sync.rs                           69
-rw-r--r--  vendor/im-rc/src/test.rs                           86
-rw-r--r--  vendor/im-rc/src/tests/hashset.rs                  85
-rw-r--r--  vendor/im-rc/src/tests/mod.rs                      24
-rw-r--r--  vendor/im-rc/src/tests/ordset.rs                   85
-rw-r--r--  vendor/im-rc/src/tests/vector.rs                  231
-rw-r--r--  vendor/im-rc/src/util.rs                          142
-rw-r--r--  vendor/im-rc/src/vector/focus.rs                  909
-rw-r--r--  vendor/im-rc/src/vector/mod.rs                   2745
-rw-r--r--  vendor/im-rc/src/vector/pool.rs                    74
-rw-r--r--  vendor/im-rc/src/vector/rayon.rs                  209
31 files changed, 20361 insertions, 0 deletions
diff --git a/vendor/im-rc/src/arbitrary.rs b/vendor/im-rc/src/arbitrary.rs
new file mode 100644
index 000000000..777a3b36f
--- /dev/null
+++ b/vendor/im-rc/src/arbitrary.rs
@@ -0,0 +1,98 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use std::hash::{BuildHasher, Hash};
+
+use ::arbitrary::{size_hint, Arbitrary, Result, Unstructured};
+
+use crate::{HashMap, HashSet, OrdMap, OrdSet, Vector};
+
+impl<'a, A: Arbitrary<'a> + Clone> Arbitrary<'a> for Vector<A> {
+ fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_iter()?.collect()
+ }
+
+ fn arbitrary_take_rest(u: Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_take_rest_iter()?.collect()
+ }
+
+ fn size_hint(depth: usize) -> (usize, Option<usize>) {
+ size_hint::recursion_guard(depth, |depth| {
+ size_hint::and(<usize as Arbitrary>::size_hint(depth), (0, None))
+ })
+ }
+}
+
+impl<'a, K: Arbitrary<'a> + Ord + Clone, V: Arbitrary<'a> + Clone> Arbitrary<'a> for OrdMap<K, V> {
+ fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_iter()?.collect()
+ }
+
+ fn arbitrary_take_rest(u: Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_take_rest_iter()?.collect()
+ }
+
+ fn size_hint(depth: usize) -> (usize, Option<usize>) {
+ size_hint::recursion_guard(depth, |depth| {
+ size_hint::and(<usize as Arbitrary>::size_hint(depth), (0, None))
+ })
+ }
+}
+
+impl<'a, A: Arbitrary<'a> + Ord + Clone> Arbitrary<'a> for OrdSet<A> {
+ fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_iter()?.collect()
+ }
+
+ fn arbitrary_take_rest(u: Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_take_rest_iter()?.collect()
+ }
+
+ fn size_hint(depth: usize) -> (usize, Option<usize>) {
+ size_hint::recursion_guard(depth, |depth| {
+ size_hint::and(<usize as Arbitrary>::size_hint(depth), (0, None))
+ })
+ }
+}
+
+impl<'a, K, V, S> Arbitrary<'a> for HashMap<K, V, S>
+where
+ K: Arbitrary<'a> + Hash + Eq + Clone,
+ V: Arbitrary<'a> + Clone,
+ S: BuildHasher + Default + 'static,
+{
+ fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_iter()?.collect()
+ }
+
+ fn arbitrary_take_rest(u: Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_take_rest_iter()?.collect()
+ }
+
+ fn size_hint(depth: usize) -> (usize, Option<usize>) {
+ size_hint::recursion_guard(depth, |depth| {
+ size_hint::and(<usize as Arbitrary>::size_hint(depth), (0, None))
+ })
+ }
+}
+
+impl<'a, A, S> Arbitrary<'a> for HashSet<A, S>
+where
+ A: Arbitrary<'a> + Hash + Eq + Clone,
+ S: BuildHasher + Default + 'static,
+{
+ fn arbitrary(u: &mut Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_iter()?.collect()
+ }
+
+ fn arbitrary_take_rest(u: Unstructured<'a>) -> Result<Self> {
+ u.arbitrary_take_rest_iter()?.collect()
+ }
+
+ fn size_hint(depth: usize) -> (usize, Option<usize>) {
+ size_hint::recursion_guard(depth, |depth| {
+ size_hint::and(<usize as Arbitrary>::size_hint(depth), (0, None))
+ })
+ }
+}
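These impls drive `Unstructured`'s collection helpers, so fuzz targets can request im-rc collections directly. A minimal sketch of how they might be used, assuming the `arbitrary` crate and this `arbitrary` feature are enabled (the function name and byte buffer are illustrative, not part of the crate):

use arbitrary::{Arbitrary, Unstructured};
use im_rc::Vector;

// Illustrative driver: the raw bytes would normally come from a fuzzer.
fn arbitrary_vector(raw: &[u8]) -> arbitrary::Result<Vector<u8>> {
    let mut u = Unstructured::new(raw);
    // Exercises the Vector impl above: collects arbitrary u8 values.
    Vector::<u8>::arbitrary(&mut u)
}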
diff --git a/vendor/im-rc/src/config.rs b/vendor/im-rc/src/config.rs
new file mode 100644
index 000000000..f8611396c
--- /dev/null
+++ b/vendor/im-rc/src/config.rs
@@ -0,0 +1,20 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use typenum::*;
+
+/// The branching factor of RRB-trees
+pub(crate) type VectorChunkSize = U64;
+
+/// The branching factor of B-trees
+pub(crate) type OrdChunkSize = U64; // Must be an even number!
+
+/// The level size of HAMTs, in bits
+/// Branching factor is 2 ^ HashLevelSize.
+pub(crate) type HashLevelSize = U5;
+
+/// The size of per-instance memory pools if the `pool` feature is enabled.
+/// This is set to 0, meaning you have to opt in to using a pool by constructing
+/// with eg. `Vector::with_pool(pool)` even if the `pool` feature is enabled.
+pub(crate) const POOL_SIZE: usize = 0;
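As a rough sketch of what these typenum aliases encode, expressed in plain constants (the names below are illustrative and not part of the crate):

// Illustrative mirror of the configuration above, not crate API.
const VECTOR_CHUNK_SIZE: usize = 64;                 // RRB-tree node capacity (U64)
const ORD_CHUNK_SIZE: usize = 64;                    // B-tree node capacity (U64, must stay even)
const HASH_LEVEL_BITS: u32 = 5;                      // bits of hash consumed per HAMT level (U5)
const HAMT_BRANCHING: usize = 1 << HASH_LEVEL_BITS;  // 2^5 = 32 children per HAMT node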
diff --git a/vendor/im-rc/src/fakepool.rs b/vendor/im-rc/src/fakepool.rs
new file mode 100644
index 000000000..5ff36f7e5
--- /dev/null
+++ b/vendor/im-rc/src/fakepool.rs
@@ -0,0 +1,208 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#![allow(dead_code)]
+
+use std::marker::PhantomData;
+use std::ops::Deref;
+use std::rc::Rc as RRc;
+use std::sync::Arc as RArc;
+
+use crate::nodes::chunk::Chunk;
+
+pub(crate) trait PoolDefault: Default {}
+pub(crate) trait PoolClone: Clone {}
+
+impl<A> PoolDefault for Chunk<A> {}
+impl<A> PoolClone for Chunk<A> where A: Clone {}
+
+pub(crate) struct Pool<A>(PhantomData<A>);
+
+impl<A> Pool<A> {
+ pub(crate) fn new(_size: usize) -> Self {
+ Pool(PhantomData)
+ }
+
+ pub(crate) fn get_pool_size(&self) -> usize {
+ 0
+ }
+
+ pub(crate) fn fill(&self) {}
+}
+
+impl<A> Clone for Pool<A> {
+ fn clone(&self) -> Self {
+ Self::new(0)
+ }
+}
+
+// Rc
+
+#[derive(Default)]
+pub(crate) struct Rc<A>(RRc<A>);
+
+impl<A> Rc<A> {
+ #[inline(always)]
+ pub(crate) fn default(_pool: &Pool<A>) -> Self
+ where
+ A: PoolDefault,
+ {
+ Self(Default::default())
+ }
+
+ #[inline(always)]
+ pub(crate) fn new(_pool: &Pool<A>, value: A) -> Self {
+ Rc(RRc::new(value))
+ }
+
+ #[inline(always)]
+ pub(crate) fn clone_from(_pool: &Pool<A>, value: &A) -> Self
+ where
+ A: PoolClone,
+ {
+ Rc(RRc::new(value.clone()))
+ }
+
+ #[inline(always)]
+ pub(crate) fn make_mut<'a>(_pool: &Pool<A>, this: &'a mut Self) -> &'a mut A
+ where
+ A: PoolClone,
+ {
+ RRc::make_mut(&mut this.0)
+ }
+
+ #[inline(always)]
+ pub(crate) fn ptr_eq(left: &Self, right: &Self) -> bool {
+ RRc::ptr_eq(&left.0, &right.0)
+ }
+
+ pub(crate) fn unwrap_or_clone(this: Self) -> A
+ where
+ A: PoolClone,
+ {
+ RRc::try_unwrap(this.0).unwrap_or_else(|r| (*r).clone())
+ }
+}
+
+impl<A> Clone for Rc<A> {
+ #[inline(always)]
+ fn clone(&self) -> Self {
+ Rc(self.0.clone())
+ }
+}
+
+impl<A> Deref for Rc<A> {
+ type Target = A;
+ #[inline(always)]
+ fn deref(&self) -> &Self::Target {
+ self.0.deref()
+ }
+}
+
+impl<A> PartialEq for Rc<A>
+where
+ A: PartialEq,
+{
+ #[inline(always)]
+ fn eq(&self, other: &Self) -> bool {
+ **self == **other
+ }
+}
+
+impl<A> Eq for Rc<A> where A: Eq {}
+
+impl<A> std::fmt::Debug for Rc<A>
+where
+ A: std::fmt::Debug,
+{
+ #[inline(always)]
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ self.0.fmt(f)
+ }
+}
+
+// Arc
+
+#[derive(Default)]
+pub(crate) struct Arc<A>(RArc<A>);
+
+impl<A> Arc<A> {
+ #[inline(always)]
+ pub(crate) fn default(_pool: &Pool<A>) -> Self
+ where
+ A: PoolDefault,
+ {
+ Self(Default::default())
+ }
+
+ #[inline(always)]
+ pub(crate) fn new(_pool: &Pool<A>, value: A) -> Self {
+ Self(RArc::new(value))
+ }
+
+ #[inline(always)]
+ pub(crate) fn clone_from(_pool: &Pool<A>, value: &A) -> Self
+ where
+ A: PoolClone,
+ {
+ Self(RArc::new(value.clone()))
+ }
+
+ #[inline(always)]
+ pub(crate) fn make_mut<'a>(_pool: &Pool<A>, this: &'a mut Self) -> &'a mut A
+ where
+ A: PoolClone,
+ {
+ RArc::make_mut(&mut this.0)
+ }
+
+ #[inline(always)]
+ pub(crate) fn ptr_eq(left: &Self, right: &Self) -> bool {
+ RArc::ptr_eq(&left.0, &right.0)
+ }
+
+ pub(crate) fn unwrap_or_clone(this: Self) -> A
+ where
+ A: PoolClone,
+ {
+ RArc::try_unwrap(this.0).unwrap_or_else(|r| (*r).clone())
+ }
+}
+
+impl<A> Clone for Arc<A> {
+ #[inline(always)]
+ fn clone(&self) -> Self {
+ Self(self.0.clone())
+ }
+}
+
+impl<A> Deref for Arc<A> {
+ type Target = A;
+ #[inline(always)]
+ fn deref(&self) -> &Self::Target {
+ self.0.deref()
+ }
+}
+
+impl<A> PartialEq for Arc<A>
+where
+ A: PartialEq,
+{
+ #[inline(always)]
+ fn eq(&self, other: &Self) -> bool {
+ **self == **other
+ }
+}
+
+impl<A> Eq for Arc<A> where A: Eq {}
+
+impl<A> std::fmt::Debug for Arc<A>
+where
+ A: std::fmt::Debug,
+{
+ #[inline(always)]
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
+ self.0.fmt(f)
+ }
+}
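The `make_mut` and `unwrap_or_clone` helpers above simply forward to the standard library's copy-on-write primitives; a small self-contained sketch of that behaviour using a plain `std::rc::Rc`, independent of this crate:

use std::rc::Rc;

fn cow_sketch() {
    let a = Rc::new(vec![1, 2, 3]);
    let mut b = Rc::clone(&a);        // two handles, one shared allocation
    Rc::make_mut(&mut b).push(4);     // still shared, so the Vec is cloned first
    assert_eq!(*a, vec![1, 2, 3]);    // the original handle is untouched
    assert_eq!(*b, vec![1, 2, 3, 4]);
    // try_unwrap + unwrap_or_else is what fakepool's unwrap_or_clone does.
    let owned: Vec<i32> = Rc::try_unwrap(b).unwrap_or_else(|rc| (*rc).clone());
    assert_eq!(owned, vec![1, 2, 3, 4]);
}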
diff --git a/vendor/im-rc/src/hash/map.rs b/vendor/im-rc/src/hash/map.rs
new file mode 100644
index 000000000..2c2761b0e
--- /dev/null
+++ b/vendor/im-rc/src/hash/map.rs
@@ -0,0 +1,2379 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! An unordered map.
+//!
+//! An immutable hash map using [hash array mapped tries][1].
+//!
+//! Most operations on this map are O(log<sub>x</sub> n) for a
+//! suitably high *x* that it should be nearly O(1) for most maps.
+//! Because of this, it's a great choice for a generic map as long as
+//! you don't mind that keys will need to implement
+//! [`Hash`][std::hash::Hash] and [`Eq`][std::cmp::Eq].
+//!
+//! Map entries will have a predictable order based on the hasher
+//! being used. Unless otherwise specified, this will be the standard
+//! [`RandomState`][std::collections::hash_map::RandomState] hasher.
+//!
+//! [1]: https://en.wikipedia.org/wiki/Hash_array_mapped_trie
+//! [std::cmp::Eq]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+//! [std::hash::Hash]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+//! [std::collections::hash_map::RandomState]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::collections;
+use std::collections::hash_map::RandomState;
+use std::fmt::{Debug, Error, Formatter};
+use std::hash::{BuildHasher, Hash, Hasher};
+use std::iter::{FromIterator, FusedIterator, Sum};
+use std::mem;
+use std::ops::{Add, Index, IndexMut};
+
+use crate::nodes::hamt::{
+ hash_key, Drain as NodeDrain, HashBits, HashValue, Iter as NodeIter, IterMut as NodeIterMut,
+ Node,
+};
+use crate::util::{Pool, PoolRef, Ref};
+
+/// Construct a hash map from a sequence of key/value pairs.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::hashmap::HashMap;
+/// # fn main() {
+/// assert_eq!(
+/// hashmap!{
+/// 1 => 11,
+/// 2 => 22,
+/// 3 => 33
+/// },
+/// HashMap::from(vec![(1, 11), (2, 22), (3, 33)])
+/// );
+/// # }
+/// ```
+#[macro_export]
+macro_rules! hashmap {
+ () => { $crate::hashmap::HashMap::new() };
+
+ ( $( $key:expr => $value:expr ),* ) => {{
+ let mut map = $crate::hashmap::HashMap::new();
+ $({
+ map.insert($key, $value);
+ })*;
+ map
+ }};
+
+ ( $( $key:expr => $value:expr ,)* ) => {{
+ let mut map = $crate::hashmap::HashMap::new();
+ $({
+ map.insert($key, $value);
+ })*;
+ map
+ }};
+}
+
+def_pool!(HashMapPool<K,V>, Node<(K,V)>);
+
+/// An unordered map.
+///
+/// An immutable hash map using [hash array mapped tries][1].
+///
+/// Most operations on this map are O(log<sub>x</sub> n) for a
+/// suitably high *x* that it should be nearly O(1) for most maps.
+/// Because of this, it's a great choice for a generic map as long as
+/// you don't mind that keys will need to implement
+/// [`Hash`][std::hash::Hash] and [`Eq`][std::cmp::Eq].
+///
+/// Map entries will have a predictable order based on the hasher
+/// being used. Unless otherwise specified, this will be the standard
+/// [`RandomState`][std::collections::hash_map::RandomState] hasher.
+///
+/// [1]: https://en.wikipedia.org/wiki/Hash_array_mapped_trie
+/// [std::cmp::Eq]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [std::hash::Hash]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [std::collections::hash_map::RandomState]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+
+pub struct HashMap<K, V, S = RandomState> {
+ size: usize,
+ pool: HashMapPool<K, V>,
+ root: PoolRef<Node<(K, V)>>,
+ hasher: Ref<S>,
+}
+
+impl<K, V> HashValue for (K, V)
+where
+ K: Eq,
+{
+ type Key = K;
+
+ fn extract_key(&self) -> &Self::Key {
+ &self.0
+ }
+
+ fn ptr_eq(&self, _other: &Self) -> bool {
+ false
+ }
+}
+
+impl<K, V> HashMap<K, V, RandomState> {
+ /// Construct an empty hash map.
+ #[inline]
+ #[must_use]
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ /// Construct an empty hash map using a specific memory pool.
+ #[cfg(feature = "pool")]
+ #[must_use]
+ pub fn with_pool(pool: &HashMapPool<K, V>) -> Self {
+ let root = PoolRef::default(&pool.0);
+ Self {
+ size: 0,
+ hasher: Default::default(),
+ pool: pool.clone(),
+ root,
+ }
+ }
+}
+
+impl<K, V> HashMap<K, V, RandomState>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+{
+ /// Construct a hash map with a single mapping.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map = HashMap::unit(123, "onetwothree");
+ /// assert_eq!(
+ /// map.get(&123),
+ /// Some(&"onetwothree")
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn unit(k: K, v: V) -> HashMap<K, V> {
+ HashMap::new().update(k, v)
+ }
+}
+
+impl<K, V, S> HashMap<K, V, S> {
+ /// Test whether a hash map is empty.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// assert!(
+ /// !hashmap!{1 => 2}.is_empty()
+ /// );
+ /// assert!(
+ /// HashMap::<i32, i32>::new().is_empty()
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Get the size of a hash map.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// assert_eq!(3, hashmap!{
+ /// 1 => 11,
+ /// 2 => 22,
+ /// 3 => 33
+ /// }.len());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.size
+ }
+
+ /// Test whether two maps refer to the same content in memory.
+ ///
+ /// This is true if the two sides are references to the same map,
+ /// or if the two maps refer to the same root node.
+ ///
+ /// This would return true if you're comparing a map to itself, or
+ /// if you're comparing a map to a fresh clone of itself.
+ ///
+ /// Time: O(1)
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ std::ptr::eq(self, other) || PoolRef::ptr_eq(&self.root, &other.root)
+ }
+
+ /// Get a reference to the memory pool used by this map.
+ ///
+ /// Note that if you didn't specifically construct it with a pool, you'll
+ /// get back a reference to a pool of size 0.
+ #[cfg(feature = "pool")]
+ pub fn pool(&self) -> &HashMapPool<K, V> {
+ &self.pool
+ }
+
+ /// Construct an empty hash map using the provided hasher.
+ #[inline]
+ #[must_use]
+ pub fn with_hasher<RS>(hasher: RS) -> Self
+ where
+ Ref<S>: From<RS>,
+ {
+ let pool = HashMapPool::default();
+ let root = PoolRef::default(&pool.0);
+ HashMap {
+ size: 0,
+ hasher: hasher.into(),
+ pool,
+ root,
+ }
+ }
+
+ /// Construct an empty hash map using a specific memory pool and hasher.
+ #[cfg(feature = "pool")]
+ #[must_use]
+ pub fn with_pool_hasher<RS>(pool: &HashMapPool<K, V>, hasher: RS) -> Self
+ where
+ Ref<S>: From<RS>,
+ {
+ let root = PoolRef::default(&pool.0);
+ Self {
+ size: 0,
+ hasher: hasher.into(),
+ pool: pool.clone(),
+ root,
+ }
+ }
+
+ /// Get a reference to the map's [`BuildHasher`][BuildHasher].
+ ///
+ /// [BuildHasher]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+ #[must_use]
+ pub fn hasher(&self) -> &Ref<S> {
+ &self.hasher
+ }
+
+ /// Construct an empty hash map using the same hasher as the
+ /// current hash map.
+ #[inline]
+ #[must_use]
+ pub fn new_from<K1, V1>(&self) -> HashMap<K1, V1, S>
+ where
+ K1: Hash + Eq + Clone,
+ V1: Clone,
+ {
+ let pool = HashMapPool::default();
+ let root = PoolRef::default(&pool.0);
+ HashMap {
+ size: 0,
+ pool,
+ root,
+ hasher: self.hasher.clone(),
+ }
+ }
+
+ /// Get an iterator over the key/value pairs of a hash map.
+ ///
+ /// Please note that the order is consistent between maps using
+ /// the same hasher, but no other ordering guarantee is offered.
+ /// Items will not come out in insertion order or sort order.
+ /// They will, however, come out in the same order every time for
+ /// the same map.
+ #[inline]
+ #[must_use]
+ pub fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ it: NodeIter::new(&self.root, self.size),
+ }
+ }
+
+ /// Get an iterator over a hash map's keys.
+ ///
+ /// Please note that the order is consistent between maps using
+ /// the same hasher, but no other ordering guarantee is offered.
+ /// Items will not come out in insertion order or sort order.
+ /// They will, however, come out in the same order every time for
+ /// the same map.
+ #[inline]
+ #[must_use]
+ pub fn keys(&self) -> Keys<'_, K, V> {
+ Keys {
+ it: NodeIter::new(&self.root, self.size),
+ }
+ }
+
+ /// Get an iterator over a hash map's values.
+ ///
+ /// Please note that the order is consistent between maps using
+ /// the same hasher, but no other ordering guarantee is offered.
+ /// Items will not come out in insertion order or sort order.
+ /// They will, however, come out in the same order every time for
+ /// the same map.
+ #[inline]
+ #[must_use]
+ pub fn values(&self) -> Values<'_, K, V> {
+ Values {
+ it: NodeIter::new(&self.root, self.size),
+ }
+ }
+
+ /// Discard all elements from the map.
+ ///
+ /// This leaves you with an empty map, and all elements that
+ /// were previously inside it are dropped.
+ ///
+ /// Time: O(n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::HashMap;
+ /// let mut map = hashmap![1=>1, 2=>2, 3=>3];
+ /// map.clear();
+ /// assert!(map.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ if !self.is_empty() {
+ self.root = PoolRef::default(&self.pool.0);
+ self.size = 0;
+ }
+ }
+}
+
+impl<K, V, S> HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ S: BuildHasher,
+{
+ fn test_eq(&self, other: &Self) -> bool
+ where
+ K: Hash + Eq,
+ V: PartialEq,
+ {
+ if self.len() != other.len() {
+ return false;
+ }
+ let mut seen = collections::HashSet::new();
+ for (key, value) in self.iter() {
+ if Some(value) != other.get(key) {
+ return false;
+ }
+ seen.insert(key);
+ }
+ for key in other.keys() {
+ if !seen.contains(&key) {
+ return false;
+ }
+ }
+ true
+ }
+
+ /// Get the value for a key from a hash map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map = hashmap!{123 => "lol"};
+ /// assert_eq!(
+ /// map.get(&123),
+ /// Some(&"lol")
+ /// );
+ /// ```
+ #[must_use]
+ pub fn get<BK>(&self, key: &BK) -> Option<&V>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.root
+ .get(hash_key(&*self.hasher, key), 0, key)
+ .map(|&(_, ref v)| v)
+ }
+
+ /// Get the key/value pair for a key from a hash map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map = hashmap!{123 => "lol"};
+ /// assert_eq!(
+ /// map.get_key_value(&123),
+ /// Some((&123, &"lol"))
+ /// );
+ /// ```
+ #[must_use]
+ pub fn get_key_value<BK>(&self, key: &BK) -> Option<(&K, &V)>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.root
+ .get(hash_key(&*self.hasher, key), 0, key)
+ .map(|&(ref k, ref v)| (k, v))
+ }
+
+ /// Test for the presence of a key in a hash map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map = hashmap!{123 => "lol"};
+ /// assert!(
+ /// map.contains_key(&123)
+ /// );
+ /// assert!(
+ /// !map.contains_key(&321)
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn contains_key<BK>(&self, k: &BK) -> bool
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.get(k).is_some()
+ }
+
+ /// Test whether a map is a submap of another map, meaning that
+ /// all keys in our map must also be in the other map, with the
+ /// same values.
+ ///
+ /// Use the provided function to decide whether values are equal.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn is_submap_by<B, RM, F>(&self, other: RM, mut cmp: F) -> bool
+ where
+ F: FnMut(&V, &B) -> bool,
+ RM: Borrow<HashMap<K, B, S>>,
+ {
+ self.iter()
+ .all(|(k, v)| other.borrow().get(k).map(|ov| cmp(v, ov)).unwrap_or(false))
+ }
+
+ /// Test whether a map is a proper submap of another map, meaning
+ /// that all keys in our map must also be in the other map, with
+ /// the same values. To be a proper submap, ours must also contain
+ /// fewer keys than the other map.
+ ///
+ /// Use the provided function to decide whether values are equal.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn is_proper_submap_by<B, RM, F>(&self, other: RM, cmp: F) -> bool
+ where
+ F: FnMut(&V, &B) -> bool,
+ RM: Borrow<HashMap<K, B, S>>,
+ {
+ self.len() != other.borrow().len() && self.is_submap_by(other, cmp)
+ }
+
+ /// Test whether a map is a submap of another map, meaning that
+ /// all keys in our map must also be in the other map, with the
+ /// same values.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 2 => 2};
+ /// let map2 = hashmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert!(map1.is_submap(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_submap<RM>(&self, other: RM) -> bool
+ where
+ V: PartialEq,
+ RM: Borrow<Self>,
+ {
+ self.is_submap_by(other.borrow(), PartialEq::eq)
+ }
+
+ /// Test whether a map is a proper submap of another map, meaning
+ /// that all keys in our map must also be in the other map, with
+ /// the same values. To be a proper submap, ours must also contain
+ /// fewer keys than the other map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 2 => 2};
+ /// let map2 = hashmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert!(map1.is_proper_submap(map2));
+ ///
+ /// let map3 = hashmap!{1 => 1, 2 => 2};
+ /// let map4 = hashmap!{1 => 1, 2 => 2};
+ /// assert!(!map3.is_proper_submap(map4));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_proper_submap<RM>(&self, other: RM) -> bool
+ where
+ V: PartialEq,
+ RM: Borrow<Self>,
+ {
+ self.is_proper_submap_by(other.borrow(), PartialEq::eq)
+ }
+}
+
+impl<K, V, S> HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ /// Get a mutable iterator over the values of a hash map.
+ ///
+ /// Please note that the order is consistent between maps using
+ /// the same hasher, but no other ordering guarantee is offered.
+ /// Items will not come out in insertion order or sort order.
+ /// They will, however, come out in the same order every time for
+ /// the same map.
+ #[inline]
+ #[must_use]
+ pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ IterMut {
+ it: NodeIterMut::new(&self.pool.0, root, self.size),
+ }
+ }
+
+ /// Get a mutable reference to the value for a key from a hash
+ /// map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let mut map = hashmap!{123 => "lol"};
+ /// if let Some(value) = map.get_mut(&123) {
+ /// *value = "omg";
+ /// }
+ /// assert_eq!(
+ /// map.get(&123),
+ /// Some(&"omg")
+ /// );
+ /// ```
+ #[must_use]
+ pub fn get_mut<BK>(&mut self, key: &BK) -> Option<&mut V>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.get_mut(&self.pool.0, hash_key(&*self.hasher, key), 0, key) {
+ None => None,
+ Some(&mut (_, ref mut value)) => Some(value),
+ }
+ }
+
+ /// Insert a key/value mapping into a map.
+ ///
+ /// If the map already has a mapping for the given key, the
+ /// previous value is overwritten.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let mut map = hashmap!{};
+ /// map.insert(123, "123");
+ /// map.insert(456, "456");
+ /// assert_eq!(
+ /// map,
+ /// hashmap!{123 => "123", 456 => "456"}
+ /// );
+ /// ```
+ #[inline]
+ pub fn insert(&mut self, k: K, v: V) -> Option<V> {
+ let hash = hash_key(&*self.hasher, &k);
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ let result = root.insert(&self.pool.0, hash, 0, (k, v));
+ if result.is_none() {
+ self.size += 1;
+ }
+ result.map(|(_, v)| v)
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+ /// the removed value.
+ ///
+ /// This is a copy-on-write operation, so that the parts of the
+    /// map's structure which are shared with other maps will be
+ /// safely copied before mutating.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let mut map = hashmap!{123 => "123", 456 => "456"};
+ /// assert_eq!(Some("123"), map.remove(&123));
+ /// assert_eq!(Some("456"), map.remove(&456));
+ /// assert_eq!(None, map.remove(&789));
+ /// assert!(map.is_empty());
+ /// ```
+ pub fn remove<BK>(&mut self, k: &BK) -> Option<V>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.remove_with_key(k).map(|(_, v)| v)
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+ /// the removed key and value.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let mut map = hashmap!{123 => "123", 456 => "456"};
+ /// assert_eq!(Some((123, "123")), map.remove_with_key(&123));
+ /// assert_eq!(Some((456, "456")), map.remove_with_key(&456));
+ /// assert_eq!(None, map.remove_with_key(&789));
+ /// assert!(map.is_empty());
+ /// ```
+ pub fn remove_with_key<BK>(&mut self, k: &BK) -> Option<(K, V)>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ let result = root.remove(&self.pool.0, hash_key(&*self.hasher, k), 0, k);
+ if result.is_some() {
+ self.size -= 1;
+ }
+ result
+ }
+
+ /// Get the [`Entry`][Entry] for a key in the map for in-place manipulation.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [Entry]: enum.Entry.html
+ #[must_use]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
+ let hash = hash_key(&*self.hasher, &key);
+ if self.root.get(hash, 0, &key).is_some() {
+ Entry::Occupied(OccupiedEntry {
+ map: self,
+ hash,
+ key,
+ })
+ } else {
+ Entry::Vacant(VacantEntry {
+ map: self,
+ hash,
+ key,
+ })
+ }
+ }
+
+ /// Construct a new hash map by inserting a key/value mapping into a map.
+ ///
+ /// If the map already has a mapping for the given key, the previous value
+ /// is overwritten.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map = hashmap!{};
+ /// assert_eq!(
+ /// map.update(123, "123"),
+ /// hashmap!{123 => "123"}
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn update(&self, k: K, v: V) -> Self {
+ let mut out = self.clone();
+ out.insert(k, v);
+ out
+ }
+
+ /// Construct a new hash map by inserting a key/value mapping into
+ /// a map.
+ ///
+ /// If the map already has a mapping for the given key, we call
+ /// the provided function with the old value and the new value,
+ /// and insert the result as the new value.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn update_with<F>(&self, k: K, v: V, f: F) -> Self
+ where
+ F: FnOnce(V, V) -> V,
+ {
+ match self.extract_with_key(&k) {
+ None => self.update(k, v),
+ Some((_, v2, m)) => m.update(k, f(v2, v)),
+ }
+ }
+
+ /// Construct a new map by inserting a key/value mapping into a
+ /// map.
+ ///
+ /// If the map already has a mapping for the given key, we call
+ /// the provided function with the key, the old value and the new
+ /// value, and insert the result as the new value.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn update_with_key<F>(&self, k: K, v: V, f: F) -> Self
+ where
+ F: FnOnce(&K, V, V) -> V,
+ {
+ match self.extract_with_key(&k) {
+ None => self.update(k, v),
+ Some((_, v2, m)) => {
+ let out_v = f(&k, v2, v);
+ m.update(k, out_v)
+ }
+ }
+ }
+
+ /// Construct a new map by inserting a key/value mapping into a
+ /// map, returning the old value for the key as well as the new
+ /// map.
+ ///
+ /// If the map already has a mapping for the given key, we call
+ /// the provided function with the key, the old value and the new
+ /// value, and insert the result as the new value.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn update_lookup_with_key<F>(&self, k: K, v: V, f: F) -> (Option<V>, Self)
+ where
+ F: FnOnce(&K, &V, V) -> V,
+ {
+ match self.extract_with_key(&k) {
+ None => (None, self.update(k, v)),
+ Some((_, v2, m)) => {
+ let out_v = f(&k, &v2, v);
+ (Some(v2), m.update(k, out_v))
+ }
+ }
+ }
+
+ /// Update the value for a given key by calling a function with
+ /// the current value and overwriting it with the function's
+ /// return value.
+ ///
+ /// The function gets an [`Option<V>`][std::option::Option] and
+ /// returns the same, so that it can decide to delete a mapping
+ /// instead of updating the value, and decide what to do if the
+ /// key isn't in the map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [std::option::Option]: https://doc.rust-lang.org/std/option/enum.Option.html
+ #[must_use]
+ pub fn alter<F>(&self, f: F, k: K) -> Self
+ where
+ F: FnOnce(Option<V>) -> Option<V>,
+ {
+ let pop = self.extract_with_key(&k);
+ match (f(pop.as_ref().map(|&(_, ref v, _)| v.clone())), pop) {
+ (None, None) => self.clone(),
+ (Some(v), None) => self.update(k, v),
+ (None, Some((_, _, m))) => m,
+ (Some(v), Some((_, _, m))) => m.update(k, v),
+ }
+ }
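A hedged sketch of how `alter` reads in practice (illustrative only, not part of the vendored file; note that the closure comes before the key, and its `Option` return decides whether the key is present in the result):

// Insert-or-increment a counter; assumes the hashmap! macro defined above.
let counts = hashmap!{"a" => 1};
let counts = counts.alter(|v| Some(v.unwrap_or(0) + 1), "a"); // "a" now maps to 2
let counts = counts.alter(|v| Some(v.unwrap_or(0) + 1), "b"); // "b" inserted as 1
assert_eq!(counts.get("a"), Some(&2));
assert_eq!(counts.get("b"), Some(&1));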
+
+ /// Construct a new map without the given key.
+ ///
+ /// Construct a map that's a copy of the current map, absent the
+ /// mapping for `key` if it's present.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn without<BK>(&self, k: &BK) -> Self
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ match self.extract_with_key(k) {
+ None => self.clone(),
+ Some((_, _, map)) => map,
+ }
+ }
+
+ /// Filter out values from a map which don't satisfy a predicate.
+ ///
+ /// This is slightly more efficient than filtering using an
+ /// iterator, in that it doesn't need to rehash the retained
+ /// values, but it still needs to reconstruct the entire tree
+ /// structure of the map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::HashMap;
+ /// let mut map = hashmap!{1 => 1, 2 => 2, 3 => 3};
+ /// map.retain(|k, v| *k > 1);
+ /// let expected = hashmap!{2 => 2, 3 => 3};
+ /// assert_eq!(expected, map);
+ /// ```
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&K, &V) -> bool,
+ {
+ let old_root = self.root.clone();
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ for ((key, value), hash) in NodeIter::new(&old_root, self.size) {
+ if !f(key, value) && root.remove(&self.pool.0, hash, 0, key).is_some() {
+ self.size -= 1;
+ }
+ }
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+ /// the removed value as well as the updated map.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn extract<BK>(&self, k: &BK) -> Option<(V, Self)>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.extract_with_key(k).map(|(_, v, m)| (v, m))
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+    /// the removed key and value as well as the updated map.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn extract_with_key<BK>(&self, k: &BK) -> Option<(K, V, Self)>
+ where
+ BK: Hash + Eq + ?Sized,
+ K: Borrow<BK>,
+ {
+ let mut out = self.clone();
+ out.remove_with_key(k).map(|(k, v)| (k, v, out))
+ }
+
+ /// Construct the union of two maps, keeping the values in the
+ /// current map when keys exist in both maps.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 3};
+ /// let map2 = hashmap!{2 => 2, 3 => 4};
+ /// let expected = hashmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert_eq!(expected, map1.union(map2));
+ /// ```
+ #[must_use]
+ pub fn union(self, other: Self) -> Self {
+ let (mut to_mutate, to_consume) = if self.len() >= other.len() {
+ (self, other)
+ } else {
+ (other, self)
+ };
+ for (k, v) in to_consume {
+ to_mutate.entry(k).or_insert(v);
+ }
+ to_mutate
+ }
+
+ /// Construct the union of two maps, using a function to decide
+ /// what to do with the value when a key is in both maps.
+ ///
+ /// The function is called when a value exists in both maps, and
+ /// receives the value from the current map as its first argument,
+ /// and the value from the other map as the second. It should
+ /// return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
+ #[inline]
+ #[must_use]
+ pub fn union_with<F>(self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(V, V) -> V,
+ {
+ self.union_with_key(other, |_, v1, v2| f(v1, v2))
+ }
+
+ /// Construct the union of two maps, using a function to decide
+ /// what to do with the value when a key is in both maps.
+ ///
+ /// The function is called when a value exists in both maps, and
+ /// receives a reference to the key as its first argument, the
+ /// value from the current map as the second argument, and the
+ /// value from the other map as the third argument. It should
+ /// return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 4};
+ /// let map2 = hashmap!{2 => 2, 3 => 5};
+ /// let expected = hashmap!{1 => 1, 2 => 2, 3 => 9};
+ /// assert_eq!(expected, map1.union_with_key(
+ /// map2,
+ /// |key, left, right| left + right
+ /// ));
+ /// ```
+ #[must_use]
+ pub fn union_with_key<F>(self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> V,
+ {
+ if self.len() >= other.len() {
+ self.union_with_key_inner(other, f)
+ } else {
+ other.union_with_key_inner(self, |key, other_value, self_value| {
+ f(key, self_value, other_value)
+ })
+ }
+ }
+
+ fn union_with_key_inner<F>(mut self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> V,
+ {
+ for (key, right_value) in other {
+ match self.remove(&key) {
+ None => {
+ self.insert(key, right_value);
+ }
+ Some(left_value) => {
+ let final_value = f(&key, left_value, right_value);
+ self.insert(key, final_value);
+ }
+ }
+ }
+ self
+ }
+
+ /// Construct the union of a sequence of maps, selecting the value
+ /// of the leftmost when a key appears in more than one map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 3};
+ /// let map2 = hashmap!{2 => 2};
+ /// let expected = hashmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert_eq!(expected, HashMap::unions(vec![map1, map2]));
+ /// ```
+ #[must_use]
+ pub fn unions<I>(i: I) -> Self
+ where
+ S: Default,
+ I: IntoIterator<Item = Self>,
+ {
+ i.into_iter().fold(Self::default(), Self::union)
+ }
+
+ /// Construct the union of a sequence of maps, using a function to
+ /// decide what to do with the value when a key is in more than
+ /// one map.
+ ///
+ /// The function is called when a value exists in multiple maps,
+ /// and receives the value from the current map as its first
+ /// argument, and the value from the next map as the second. It
+ /// should return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn unions_with<I, F>(i: I, f: F) -> Self
+ where
+ S: Default,
+ I: IntoIterator<Item = Self>,
+ F: Fn(V, V) -> V,
+ {
+ i.into_iter()
+ .fold(Self::default(), |a, b| a.union_with(b, &f))
+ }
+
+ /// Construct the union of a sequence of maps, using a function to
+ /// decide what to do with the value when a key is in more than
+ /// one map.
+ ///
+ /// The function is called when a value exists in multiple maps,
+ /// and receives a reference to the key as its first argument, the
+ /// value from the current map as the second argument, and the
+ /// value from the next map as the third argument. It should
+ /// return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn unions_with_key<I, F>(i: I, f: F) -> Self
+ where
+ S: Default,
+ I: IntoIterator<Item = Self>,
+ F: Fn(&K, V, V) -> V,
+ {
+ i.into_iter()
+ .fold(Self::default(), |a, b| a.union_with_key(b, &f))
+ }
+
+ /// Construct the symmetric difference between two maps by discarding keys
+ /// which occur in both maps.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference`][symmetric_difference] method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 4};
+ /// let map2 = hashmap!{2 => 2, 3 => 5};
+ /// let expected = hashmap!{1 => 1, 2 => 2};
+ /// assert_eq!(expected, map1.difference(map2));
+ /// ```
+ ///
+ /// [symmetric_difference]: #method.symmetric_difference
+ #[inline]
+ #[must_use]
+ pub fn difference(self, other: Self) -> Self {
+ self.symmetric_difference(other)
+ }
+
+ /// Construct the symmetric difference between two maps by discarding keys
+ /// which occur in both maps.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 4};
+ /// let map2 = hashmap!{2 => 2, 3 => 5};
+ /// let expected = hashmap!{1 => 1, 2 => 2};
+ /// assert_eq!(expected, map1.symmetric_difference(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn symmetric_difference(self, other: Self) -> Self {
+ self.symmetric_difference_with_key(other, |_, _, _| None)
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference_with`][symmetric_difference_with] method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// [symmetric_difference_with]: #method.symmetric_difference_with
+ #[inline]
+ #[must_use]
+ pub fn difference_with<F>(self, other: Self, f: F) -> Self
+ where
+ F: FnMut(V, V) -> Option<V>,
+ {
+ self.symmetric_difference_with(other, f)
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both.
+ ///
+ /// Time: O(n log n)
+ #[inline]
+ #[must_use]
+ pub fn symmetric_difference_with<F>(self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(V, V) -> Option<V>,
+ {
+ self.symmetric_difference_with_key(other, |_, a, b| f(a, b))
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both. The function
+ /// receives the key as well as both values.
+ ///
+ /// This is an alias for the
+    /// [`symmetric_difference_with_key`][symmetric_difference_with_key]
+ /// method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 4};
+ /// let map2 = hashmap!{2 => 2, 3 => 5};
+ /// let expected = hashmap!{1 => 1, 2 => 2, 3 => 9};
+ /// assert_eq!(expected, map1.difference_with_key(
+ /// map2,
+ /// |key, left, right| Some(left + right)
+ /// ));
+ /// ```
+ ///
+ /// [symmetric_difference_with_key]: #method.symmetric_difference_with_key
+ #[must_use]
+ pub fn difference_with_key<F>(self, other: Self, f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> Option<V>,
+ {
+ self.symmetric_difference_with_key(other, f)
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both. The function
+ /// receives the key as well as both values.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 3 => 4};
+ /// let map2 = hashmap!{2 => 2, 3 => 5};
+ /// let expected = hashmap!{1 => 1, 2 => 2, 3 => 9};
+ /// assert_eq!(expected, map1.symmetric_difference_with_key(
+ /// map2,
+ /// |key, left, right| Some(left + right)
+ /// ));
+ /// ```
+ #[must_use]
+ pub fn symmetric_difference_with_key<F>(mut self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> Option<V>,
+ {
+ let mut out = self.new_from();
+ for (key, right_value) in other {
+ match self.remove(&key) {
+ None => {
+ out.insert(key, right_value);
+ }
+ Some(left_value) => {
+ if let Some(final_value) = f(&key, left_value, right_value) {
+ out.insert(key, final_value);
+ }
+ }
+ }
+ }
+ out.union(self)
+ }
+
+ /// Construct the relative complement between two maps by discarding keys
+ /// which occur in `other`.
+ ///
+ /// Time: O(m log n) where m is the size of the other map
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashmap::HashMap;
+    /// let map1 = hashmap!{1 => 1, 3 => 4};
+    /// let map2 = hashmap!{2 => 2, 3 => 5};
+    /// let expected = hashmap!{1 => 1};
+ /// assert_eq!(expected, map1.relative_complement(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn relative_complement(mut self, other: Self) -> Self {
+ for (key, _) in other {
+ let _ = self.remove(&key);
+ }
+ self
+ }
+
+ /// Construct the intersection of two maps, keeping the values
+ /// from the current map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 2 => 2};
+ /// let map2 = hashmap!{2 => 3, 3 => 4};
+ /// let expected = hashmap!{2 => 2};
+ /// assert_eq!(expected, map1.intersection(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn intersection(self, other: Self) -> Self {
+ self.intersection_with_key(other, |_, v, _| v)
+ }
+
+ /// Construct the intersection of two maps, calling a function
+ /// with both values for each key and using the result as the
+ /// value for the key.
+ ///
+ /// Time: O(n log n)
+ #[inline]
+ #[must_use]
+ pub fn intersection_with<B, C, F>(self, other: HashMap<K, B, S>, mut f: F) -> HashMap<K, C, S>
+ where
+ B: Clone,
+ C: Clone,
+ F: FnMut(V, B) -> C,
+ {
+ self.intersection_with_key(other, |_, v1, v2| f(v1, v2))
+ }
+
+ /// Construct the intersection of two maps, calling a function
+ /// with the key and both values for each key and using the result
+ /// as the value for the key.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashmap::HashMap;
+ /// let map1 = hashmap!{1 => 1, 2 => 2};
+ /// let map2 = hashmap!{2 => 3, 3 => 4};
+ /// let expected = hashmap!{2 => 5};
+ /// assert_eq!(expected, map1.intersection_with_key(
+ /// map2,
+ /// |key, left, right| left + right
+ /// ));
+ /// ```
+ #[must_use]
+ pub fn intersection_with_key<B, C, F>(
+ mut self,
+ other: HashMap<K, B, S>,
+ mut f: F,
+ ) -> HashMap<K, C, S>
+ where
+ B: Clone,
+ C: Clone,
+ F: FnMut(&K, V, B) -> C,
+ {
+ let mut out = self.new_from();
+ for (key, right_value) in other {
+ match self.remove(&key) {
+ None => (),
+ Some(left_value) => {
+ let result = f(&key, left_value, right_value);
+ out.insert(key, result);
+ }
+ }
+ }
+ out
+ }
+}
+
+// Entries
+
+/// A handle for a key and its associated value.
+///
+/// ## Performance Note
+///
+/// When using an `Entry`, the key is only ever hashed once, when you
+/// create the `Entry`. Operations on an `Entry` will never trigger a
+/// rehash, whereas e.g. a `contains_key(key)` followed by an
+/// `insert(key, default_value)` (the equivalent of
+/// `Entry::or_insert()`) would need to hash the key once for the
+/// `contains_key` and again for the `insert`. The operations
+/// generally perform similarly otherwise.
+pub enum Entry<'a, K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ /// An entry which exists in the map.
+ Occupied(OccupiedEntry<'a, K, V, S>),
+ /// An entry which doesn't exist in the map.
+ Vacant(VacantEntry<'a, K, V, S>),
+}
+
+impl<'a, K, V, S> Entry<'a, K, V, S>
+where
+ K: 'a + Hash + Eq + Clone,
+ V: 'a + Clone,
+ S: 'a + BuildHasher,
+{
+ /// Insert the default value provided if there was no value
+ /// already, and return a mutable reference to the value.
+ pub fn or_insert(self, default: V) -> &'a mut V {
+ self.or_insert_with(|| default)
+ }
+
+ /// Insert the default value from the provided function if there
+ /// was no value already, and return a mutable reference to the
+ /// value.
+ pub fn or_insert_with<F>(self, default: F) -> &'a mut V
+ where
+ F: FnOnce() -> V,
+ {
+ match self {
+ Entry::Occupied(entry) => entry.into_mut(),
+ Entry::Vacant(entry) => entry.insert(default()),
+ }
+ }
+
+ /// Insert a default value if there was no value already, and
+ /// return a mutable reference to the value.
+ pub fn or_default(self) -> &'a mut V
+ where
+ V: Default,
+ {
+ self.or_insert_with(Default::default)
+ }
+
+ /// Get the key for this entry.
+ #[must_use]
+ pub fn key(&self) -> &K {
+ match self {
+ Entry::Occupied(entry) => entry.key(),
+ Entry::Vacant(entry) => entry.key(),
+ }
+ }
+
+ /// Call the provided function to modify the value if the value
+ /// exists.
+ pub fn and_modify<F>(mut self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match &mut self {
+ Entry::Occupied(ref mut entry) => f(entry.get_mut()),
+ Entry::Vacant(_) => (),
+ }
+ self
+ }
+}
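A hedged sketch of the `Entry` flow the performance note above describes, exercising `entry`, `or_insert` and `and_modify` as defined in this file (the map contents are illustrative):

// Each call hashes the key exactly once, whether or not the mapping exists.
let mut word_counts = hashmap!{"apple" => 2};
*word_counts.entry("apple").or_insert(0) += 1;        // existing key: 2 -> 3
*word_counts.entry("pear").or_insert(0) += 1;         // missing key: inserted as 0, then bumped to 1
word_counts.entry("apple").and_modify(|n| *n *= 10);  // modify in place only if present: 3 -> 30
assert_eq!(word_counts.get("apple"), Some(&30));
assert_eq!(word_counts.get("pear"), Some(&1));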
+
+/// An entry for a mapping that already exists in the map.
+pub struct OccupiedEntry<'a, K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ map: &'a mut HashMap<K, V, S>,
+ hash: HashBits,
+ key: K,
+}
+
+impl<'a, K, V, S> OccupiedEntry<'a, K, V, S>
+where
+ K: 'a + Hash + Eq + Clone,
+ V: 'a + Clone,
+ S: 'a + BuildHasher,
+{
+ /// Get the key for this entry.
+ #[must_use]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ /// Remove this entry from the map and return the removed mapping.
+ pub fn remove_entry(self) -> (K, V) {
+ let root = PoolRef::make_mut(&self.map.pool.0, &mut self.map.root);
+ let result = root.remove(&self.map.pool.0, self.hash, 0, &self.key);
+ self.map.size -= 1;
+ result.unwrap()
+ }
+
+ /// Get the current value.
+ #[must_use]
+ pub fn get(&self) -> &V {
+ &self.map.root.get(self.hash, 0, &self.key).unwrap().1
+ }
+
+ /// Get a mutable reference to the current value.
+ #[must_use]
+ pub fn get_mut(&mut self) -> &mut V {
+ let root = PoolRef::make_mut(&self.map.pool.0, &mut self.map.root);
+ &mut root
+ .get_mut(&self.map.pool.0, self.hash, 0, &self.key)
+ .unwrap()
+ .1
+ }
+
+ /// Convert this entry into a mutable reference.
+ #[must_use]
+ pub fn into_mut(self) -> &'a mut V {
+ let root = PoolRef::make_mut(&self.map.pool.0, &mut self.map.root);
+ &mut root
+ .get_mut(&self.map.pool.0, self.hash, 0, &self.key)
+ .unwrap()
+ .1
+ }
+
+ /// Overwrite the current value.
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Remove this entry from the map and return the removed value.
+ pub fn remove(self) -> V {
+ self.remove_entry().1
+ }
+}
+
+/// An entry for a mapping that does not already exist in the map.
+pub struct VacantEntry<'a, K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ map: &'a mut HashMap<K, V, S>,
+ hash: HashBits,
+ key: K,
+}
+
+impl<'a, K, V, S> VacantEntry<'a, K, V, S>
+where
+ K: 'a + Hash + Eq + Clone,
+ V: 'a + Clone,
+ S: 'a + BuildHasher,
+{
+ /// Get the key for this entry.
+ #[must_use]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ /// Convert this entry into its key.
+ #[must_use]
+ pub fn into_key(self) -> K {
+ self.key
+ }
+
+ /// Insert a value into this entry.
+ pub fn insert(self, value: V) -> &'a mut V {
+ let root = PoolRef::make_mut(&self.map.pool.0, &mut self.map.root);
+ if root
+ .insert(&self.map.pool.0, self.hash, 0, (self.key.clone(), value))
+ .is_none()
+ {
+ self.map.size += 1;
+ }
+ // TODO it's unfortunate that we need to look up the key again
+ // here to get the mut ref.
+ &mut root
+ .get_mut(&self.map.pool.0, self.hash, 0, &self.key)
+ .unwrap()
+ .1
+ }
+}
+
+// Core traits
+
+impl<K, V, S> Clone for HashMap<K, V, S>
+where
+ K: Clone,
+ V: Clone,
+{
+ /// Clone a map.
+ ///
+ /// Time: O(1)
+ #[inline]
+ fn clone(&self) -> Self {
+ HashMap {
+ root: self.root.clone(),
+ pool: self.pool.clone(),
+ size: self.size,
+ hasher: self.hasher.clone(),
+ }
+ }
+}
+
+#[cfg(not(has_specialisation))]
+impl<K, V, S> PartialEq for HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ V: PartialEq,
+ S: BuildHasher,
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.test_eq(other)
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K, V, S> PartialEq for HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ V: PartialEq,
+ S: BuildHasher,
+{
+ default fn eq(&self, other: &Self) -> bool {
+ self.test_eq(other)
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K, V, S> PartialEq for HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ V: Eq,
+ S: BuildHasher,
+{
+ fn eq(&self, other: &Self) -> bool {
+ if PoolRef::ptr_eq(&self.root, &other.root) {
+ return true;
+ }
+ self.test_eq(other)
+ }
+}
+
+impl<K, V, S> Eq for HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ V: Eq,
+ S: BuildHasher,
+{
+}
+
+impl<K, V, S> PartialOrd for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone + PartialOrd,
+ V: PartialOrd + Clone,
+ S: BuildHasher,
+{
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ if Ref::ptr_eq(&self.hasher, &other.hasher) {
+ return self.iter().partial_cmp(other.iter());
+ }
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+impl<K, V, S> Ord for HashMap<K, V, S>
+where
+ K: Hash + Eq + Ord + Clone,
+ V: Ord + Clone,
+ S: BuildHasher,
+{
+ fn cmp(&self, other: &Self) -> Ordering {
+ if Ref::ptr_eq(&self.hasher, &other.hasher) {
+ return self.iter().cmp(other.iter());
+ }
+ self.iter().cmp(other.iter())
+ }
+}
+
+impl<K, V, S> Hash for HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ V: Hash,
+ S: BuildHasher,
+{
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ for i in self.iter() {
+ i.hash(state);
+ }
+ }
+}
+
+impl<K, V, S> Default for HashMap<K, V, S>
+where
+ S: BuildHasher + Default,
+{
+ #[inline]
+ fn default() -> Self {
+ let pool = HashMapPool::default();
+ let root = PoolRef::default(&pool.0);
+ HashMap {
+ size: 0,
+ pool,
+ root,
+ hasher: Ref::<S>::default(),
+ }
+ }
+}
+
+impl<K, V, S> Add for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ type Output = HashMap<K, V, S>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.union(other)
+ }
+}
+
+impl<'a, K, V, S> Add for &'a HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ type Output = HashMap<K, V, S>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.clone().union(other.clone())
+ }
+}
+
+impl<K, V, S> Sum for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn sum<I>(it: I) -> Self
+ where
+ I: Iterator<Item = Self>,
+ {
+ it.fold(Self::default(), |a, b| a + b)
+ }
+}
+
+impl<K, V, S, RK, RV> Extend<(RK, RV)> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone + From<RK>,
+ V: Clone + From<RV>,
+ S: BuildHasher,
+{
+ fn extend<I>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = (RK, RV)>,
+ {
+ for (key, value) in iter {
+ self.insert(From::from(key), From::from(value));
+ }
+ }
+}
+
+impl<'a, BK, K, V, S> Index<&'a BK> for HashMap<K, V, S>
+where
+ BK: Hash + Eq + ?Sized,
+ K: Hash + Eq + Borrow<BK>,
+ S: BuildHasher,
+{
+ type Output = V;
+
+ fn index(&self, key: &BK) -> &Self::Output {
+ match self.root.get(hash_key(&*self.hasher, key), 0, key) {
+ None => panic!("HashMap::index: invalid key"),
+ Some(&(_, ref value)) => value,
+ }
+ }
+}
+
+impl<'a, BK, K, V, S> IndexMut<&'a BK> for HashMap<K, V, S>
+where
+ BK: Hash + Eq + ?Sized,
+ K: Hash + Eq + Clone + Borrow<BK>,
+ V: Clone,
+ S: BuildHasher,
+{
+ fn index_mut(&mut self, key: &BK) -> &mut Self::Output {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.get_mut(&self.pool.0, hash_key(&*self.hasher, key), 0, key) {
+ None => panic!("HashMap::index_mut: invalid key"),
+ Some(&mut (_, ref mut value)) => value,
+ }
+ }
+}
+
+#[cfg(not(has_specialisation))]
+impl<K, V, S> Debug for HashMap<K, V, S>
+where
+ K: Hash + Eq + Debug,
+ V: Debug,
+ S: BuildHasher,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut d = f.debug_map();
+ for (k, v) in self {
+ d.entry(k, v);
+ }
+ d.finish()
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K, V, S> Debug for HashMap<K, V, S>
+where
+ K: Hash + Eq + Debug,
+ V: Debug,
+ S: BuildHasher,
+{
+ default fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut d = f.debug_map();
+ for (k, v) in self {
+ d.entry(k, v);
+ }
+ d.finish()
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K, V, S> Debug for HashMap<K, V, S>
+where
+ K: Hash + Eq + Ord + Debug,
+ V: Debug,
+ S: BuildHasher,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut keys = collections::BTreeSet::new();
+ keys.extend(self.keys());
+ let mut d = f.debug_map();
+ for key in keys {
+ d.entry(key, &self[key]);
+ }
+ d.finish()
+ }
+}
+
+// Iterators
+
+/// An iterator over the elements of a map.
+pub struct Iter<'a, K, V> {
+ it: NodeIter<'a, (K, V)>,
+}
+
+impl<'a, K, V> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|((k, v), _)| (k, v))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {}
+
+impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}
+
+/// A mutable iterator over the elements of a map.
+pub struct IterMut<'a, K, V>
+where
+ K: Clone,
+ V: Clone,
+{
+ it: NodeIterMut<'a, (K, V)>,
+}
+
+impl<'a, K, V> Iterator for IterMut<'a, K, V>
+where
+ K: Clone,
+ V: Clone,
+{
+ type Item = (&'a K, &'a mut V);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|((k, v), _)| (&*k, v))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V>
+where
+ K: Clone,
+ V: Clone,
+{
+}
+
+impl<'a, K, V> FusedIterator for IterMut<'a, K, V>
+where
+ K: Clone,
+ V: Clone,
+{
+}
+
+/// A consuming iterator over the elements of a map.
+pub struct ConsumingIter<A: HashValue> {
+ it: NodeDrain<A>,
+}
+
+impl<A> Iterator for ConsumingIter<A>
+where
+ A: HashValue + Clone,
+{
+ type Item = A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|(p, _)| p)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<A> ExactSizeIterator for ConsumingIter<A> where A: HashValue + Clone {}
+
+impl<A> FusedIterator for ConsumingIter<A> where A: HashValue + Clone {}
+
+/// An iterator over the keys of a map.
+pub struct Keys<'a, K, V> {
+ it: NodeIter<'a, (K, V)>,
+}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|((k, _), _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {}
+
+impl<'a, K, V> FusedIterator for Keys<'a, K, V> {}
+
+/// An iterator over the values of a map.
+pub struct Values<'a, K, V> {
+ it: NodeIter<'a, (K, V)>,
+}
+
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|((_, v), _)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {}
+
+impl<'a, K, V> FusedIterator for Values<'a, K, V> {}
+
+impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S>
+where
+ K: Hash + Eq,
+ S: BuildHasher,
+{
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<K, V, S> IntoIterator for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher,
+{
+ type Item = (K, V);
+ type IntoIter = ConsumingIter<(K, V)>;
+
+ #[inline]
+ fn into_iter(self) -> Self::IntoIter {
+ ConsumingIter {
+ it: NodeDrain::new(&self.pool.0, self.root, self.size),
+ }
+ }
+}
+
+// Conversions
+
+impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from_iter<T>(i: T) -> Self
+ where
+ T: IntoIterator<Item = (K, V)>,
+ {
+ let mut map = Self::default();
+ for (k, v) in i {
+ map.insert(k, v);
+ }
+ map
+ }
+}
+
+impl<K, V, S> AsRef<HashMap<K, V, S>> for HashMap<K, V, S> {
+ #[inline]
+ fn as_ref(&self) -> &Self {
+ self
+ }
+}
+
+impl<'m, 'k, 'v, K, V, OK, OV, SA, SB> From<&'m HashMap<&'k K, &'v V, SA>> for HashMap<OK, OV, SB>
+where
+ K: Hash + Eq + ToOwned<Owned = OK> + ?Sized,
+ V: ToOwned<Owned = OV> + ?Sized,
+ OK: Hash + Eq + Clone + Borrow<K>,
+ OV: Borrow<V> + Clone,
+ SA: BuildHasher,
+ SB: BuildHasher + Default,
+{
+ fn from(m: &HashMap<&K, &V, SA>) -> Self {
+ m.iter()
+ .map(|(k, v)| ((*k).to_owned(), (*v).to_owned()))
+ .collect()
+ }
+}
+
+impl<'a, K, V, S> From<&'a [(K, V)]> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: &'a [(K, V)]) -> Self {
+ m.iter().cloned().collect()
+ }
+}
+
+impl<K, V, S> From<Vec<(K, V)>> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: Vec<(K, V)>) -> Self {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K, V, S> From<&'a Vec<(K, V)>> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: &'a Vec<(K, V)>) -> Self {
+ m.iter().cloned().collect()
+ }
+}
+
+impl<K, V, S> From<collections::HashMap<K, V>> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: collections::HashMap<K, V>) -> Self {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K, V, S> From<&'a collections::HashMap<K, V>> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: &'a collections::HashMap<K, V>) -> Self {
+ m.iter().map(|(k, v)| (k.clone(), v.clone())).collect()
+ }
+}
+
+impl<K, V, S> From<collections::BTreeMap<K, V>> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: collections::BTreeMap<K, V>) -> Self {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K, V, S> From<&'a collections::BTreeMap<K, V>> for HashMap<K, V, S>
+where
+ K: Hash + Eq + Clone,
+ V: Clone,
+ S: BuildHasher + Default,
+{
+ fn from(m: &'a collections::BTreeMap<K, V>) -> Self {
+ m.iter().map(|(k, v)| (k.clone(), v.clone())).collect()
+ }
+}
+
+// impl<K: Ord + Hash + Eq, V, S> From<OrdMap<K, V>> for HashMap<K, V, S>
+// where
+// S: BuildHasher + Default,
+// {
+// fn from(m: OrdMap<K, V>) -> Self {
+// m.into_iter().collect()
+// }
+// }
+
+// impl<'a, K: Ord + Hash + Eq, V, S> From<&'a OrdMap<K, V>> for HashMap<K, V, S>
+// where
+// S: BuildHasher + Default,
+// {
+// fn from(m: &'a OrdMap<K, V>) -> Self {
+// m.into_iter().collect()
+// }
+// }
+
+// Proptest
+#[cfg(any(test, feature = "proptest"))]
+#[doc(hidden)]
+pub mod proptest {
+ #[deprecated(
+ since = "14.3.0",
+ note = "proptest strategies have moved to im::proptest"
+ )]
+ pub use crate::proptest::hash_map;
+}
+
+// Tests
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::test::LolHasher;
+ use ::proptest::num::{i16, usize};
+ use ::proptest::{collection, proptest};
+ use std::hash::BuildHasherDefault;
+
+ #[test]
+ fn safe_mutation() {
+ let v1: HashMap<usize, usize> = (0..131_072).map(|i| (i, i)).collect::<HashMap<_, _>>();
+ let mut v2 = v1.clone();
+ v2.insert(131_000, 23);
+ assert_eq!(Some(&23), v2.get(&131_000));
+ assert_eq!(Some(&131_000), v1.get(&131_000));
+ }
+
+ #[test]
+ fn index_operator() {
+ let mut map = hashmap![1 => 2, 3 => 4, 5 => 6];
+ assert_eq!(4, map[&3]);
+ map[&3] = 8;
+ assert_eq!(hashmap![1 => 2, 3 => 8, 5 => 6], map);
+ }
+
+ #[test]
+ fn proper_formatting() {
+ let map = hashmap![1 => 2];
+ assert_eq!("{1: 2}", format!("{:?}", map));
+
+ assert_eq!("{}", format!("{:?}", HashMap::<(), ()>::new()));
+ }
+
+ #[test]
+ fn remove_failing() {
+ let pairs = [(1469, 0), (-67, 0)];
+ let mut m: collections::HashMap<i16, i16, _> =
+ collections::HashMap::with_hasher(BuildHasherDefault::<LolHasher>::default());
+ for &(ref k, ref v) in &pairs {
+ m.insert(*k, *v);
+ }
+ let mut map: HashMap<i16, i16, _> =
+ HashMap::with_hasher(BuildHasherDefault::<LolHasher>::default());
+ for (k, v) in &m {
+ map = map.update(*k, *v);
+ }
+ for k in m.keys() {
+ let l = map.len();
+ assert_eq!(m.get(k).cloned(), map.get(k).cloned());
+ map = map.without(k);
+ assert_eq!(None, map.get(k));
+ assert_eq!(l - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn match_string_keys_with_string_slices() {
+ let mut map: HashMap<String, i32> =
+ From::from(&hashmap! { "foo" => &1, "bar" => &2, "baz" => &3 });
+ assert_eq!(Some(&1), map.get("foo"));
+ map = map.without("foo");
+ assert_eq!(Some(3), map.remove("baz"));
+ map["bar"] = 8;
+ assert_eq!(8, map["bar"]);
+ }
+
+ #[test]
+ fn macro_allows_trailing_comma() {
+ let map1 = hashmap! {"x" => 1, "y" => 2};
+ let map2 = hashmap! {
+ "x" => 1,
+ "y" => 2,
+ };
+ assert_eq!(map1, map2);
+ }
+
+ #[test]
+ fn remove_top_level_collisions() {
+ let pairs = vec![9, 2569, 27145];
+ let mut map: HashMap<i16, i16, BuildHasherDefault<LolHasher>> = Default::default();
+ for k in pairs.clone() {
+ map.insert(k, k);
+ }
+ assert_eq!(pairs.len(), map.len());
+ let keys: Vec<_> = map.keys().cloned().collect();
+ for k in keys {
+ let l = map.len();
+ assert_eq!(Some(&k), map.get(&k));
+ map.remove(&k);
+ assert_eq!(None, map.get(&k));
+ assert_eq!(l - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn entry_api() {
+ let mut map = hashmap! {"bar" => 5};
+ map.entry("foo").and_modify(|v| *v += 5).or_insert(1);
+ assert_eq!(1, map[&"foo"]);
+ map.entry("foo").and_modify(|v| *v += 5).or_insert(1);
+ assert_eq!(6, map[&"foo"]);
+ map.entry("bar").and_modify(|v| *v += 5).or_insert(1);
+ assert_eq!(10, map[&"bar"]);
+ assert_eq!(
+ 10,
+ match map.entry("bar") {
+ Entry::Occupied(entry) => entry.remove(),
+ _ => panic!(),
+ }
+ );
+ assert!(!map.contains_key(&"bar"));
+ }
+
+ #[test]
+ fn refpool_crash() {
+ let _map = HashMap::<u128, usize>::new();
+ }
+
+ #[test]
+ fn large_map() {
+ let mut map = HashMap::new();
+ let size = 32769;
+ for i in 0..size {
+ map.insert(i, i);
+ }
+ assert_eq!(size, map.len());
+ for i in 0..size {
+ assert_eq!(Some(&i), map.get(&i));
+ }
+ }
+
+ proptest! {
+ #[test]
+ fn update_and_length(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let mut map: HashMap<i16, i16, BuildHasherDefault<LolHasher>> = Default::default();
+ for (index, (k, v)) in m.iter().enumerate() {
+ map = map.update(*k, *v);
+ assert_eq!(Some(v), map.get(k));
+ assert_eq!(index + 1, map.len());
+ }
+ }
+
+ #[test]
+ fn from_iterator(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let map: HashMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ assert_eq!(m.len(), map.len());
+ }
+
+ #[test]
+ fn iterate_over(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let map: HashMap<i16, i16> = FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ assert_eq!(m.len(), map.iter().count());
+ }
+
+ #[test]
+ fn equality(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let map1: HashMap<i16, i16> = FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ let map2: HashMap<i16, i16> = FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ assert_eq!(map1, map2);
+ }
+
+ #[test]
+ fn lookup(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let map: HashMap<i16, i16> = FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ for (k, v) in m {
+ assert_eq!(Some(*v), map.get(k).cloned());
+ }
+ }
+
+ #[test]
+ fn without(ref pairs in collection::vec((i16::ANY, i16::ANY), 0..100)) {
+ let mut m: collections::HashMap<i16, i16, _> =
+ collections::HashMap::with_hasher(BuildHasherDefault::<LolHasher>::default());
+ for &(ref k, ref v) in pairs {
+ m.insert(*k, *v);
+ }
+ let mut map: HashMap<i16, i16, _> = HashMap::with_hasher(BuildHasherDefault::<LolHasher>::default());
+ for (k, v) in &m {
+ map = map.update(*k, *v);
+ }
+ for k in m.keys() {
+ let l = map.len();
+ assert_eq!(m.get(k).cloned(), map.get(k).cloned());
+ map = map.without(k);
+ assert_eq!(None, map.get(k));
+ assert_eq!(l - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn insert(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let mut mut_map: HashMap<i16, i16, BuildHasherDefault<LolHasher>> = Default::default();
+ let mut map: HashMap<i16, i16, BuildHasherDefault<LolHasher>> = Default::default();
+ for (count, (k, v)) in m.iter().enumerate() {
+ map = map.update(*k, *v);
+ mut_map.insert(*k, *v);
+ assert_eq!(count + 1, map.len());
+ assert_eq!(count + 1, mut_map.len());
+ }
+ assert_eq!(map, mut_map);
+ }
+
+ #[test]
+ fn remove(ref pairs in collection::vec((i16::ANY, i16::ANY), 0..100)) {
+ let mut m: collections::HashMap<i16, i16, _> =
+ collections::HashMap::with_hasher(BuildHasherDefault::<LolHasher>::default());
+ for &(ref k, ref v) in pairs {
+ m.insert(*k, *v);
+ }
+ let mut map: HashMap<i16, i16, _> = HashMap::with_hasher(BuildHasherDefault::<LolHasher>::default());
+ for (k, v) in &m {
+ map.insert(*k, *v);
+ }
+ for k in m.keys() {
+ let l = map.len();
+ assert_eq!(m.get(k).cloned(), map.get(k).cloned());
+ map.remove(k);
+ assert_eq!(None, map.get(k));
+ assert_eq!(l - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn delete_and_reinsert(
+ ref input in collection::hash_map(i16::ANY, i16::ANY, 1..100),
+ index_rand in usize::ANY
+ ) {
+ let index = *input.keys().nth(index_rand % input.len()).unwrap();
+ let map1: HashMap<_, _> = HashMap::from_iter(input.clone());
+ let (val, map2) = map1.extract(&index).unwrap();
+ let map3 = map2.update(index, val);
+ for key in map2.keys() {
+ assert!(*key != index);
+ }
+ assert_eq!(map1.len(), map2.len() + 1);
+ assert_eq!(map1, map3);
+ }
+
+ #[test]
+ fn proptest_works(ref m in proptest::hash_map(0..9999, ".*", 10..100)) {
+ assert!(m.len() < 100);
+ assert!(m.len() >= 10);
+ }
+
+ #[test]
+ fn exact_size_iterator(ref m in proptest::hash_map(i16::ANY, i16::ANY, 0..100)) {
+ let mut should_be = m.len();
+ let mut it = m.iter();
+ loop {
+ assert_eq!(should_be, it.len());
+ match it.next() {
+ None => break,
+ Some(_) => should_be -= 1,
+ }
+ }
+ assert_eq!(0, it.len());
+ }
+ }
+}
diff --git a/vendor/im-rc/src/hash/mod.rs b/vendor/im-rc/src/hash/mod.rs
new file mode 100644
index 000000000..27a56a5e2
--- /dev/null
+++ b/vendor/im-rc/src/hash/mod.rs
@@ -0,0 +1,8 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#[macro_use]
+pub mod map;
+#[macro_use]
+pub mod set;
diff --git a/vendor/im-rc/src/hash/set.rs b/vendor/im-rc/src/hash/set.rs
new file mode 100644
index 000000000..edc4ad60c
--- /dev/null
+++ b/vendor/im-rc/src/hash/set.rs
@@ -0,0 +1,1134 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! An unordered set.
+//!
+//! An immutable hash set using [hash array mapped tries][1].
+//!
+//! Most operations on this set are O(log<sub>x</sub> n) with a high
+//! enough *x* that they are effectively O(1) for most sets.
+//! Because of this, it's a great choice for a generic set as long as
+//! you don't mind that values will need to implement
+//! [`Hash`][std::hash::Hash] and [`Eq`][std::cmp::Eq].
+//!
+//! Values will have a predictable order based on the hasher
+//! being used. Unless otherwise specified, this will be the standard
+//! [`RandomState`][std::collections::hash_map::RandomState] hasher.
+//!
+//! [1]: https://en.wikipedia.org/wiki/Hash_array_mapped_trie
+//! [std::cmp::Eq]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+//! [std::hash::Hash]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+//! [std::collections::hash_map::RandomState]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::collections::hash_map::RandomState;
+use std::collections::{self, BTreeSet};
+use std::fmt::{Debug, Error, Formatter};
+use std::hash::{BuildHasher, Hash, Hasher};
+use std::iter::FusedIterator;
+use std::iter::{FromIterator, IntoIterator, Sum};
+use std::ops::{Add, Deref, Mul};
+
+use crate::nodes::hamt::{hash_key, Drain as NodeDrain, HashValue, Iter as NodeIter, Node};
+use crate::ordset::OrdSet;
+use crate::util::{Pool, PoolRef, Ref};
+use crate::Vector;
+
+/// Construct a set from a sequence of values.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::hashset::HashSet;
+/// # fn main() {
+/// assert_eq!(
+/// hashset![1, 2, 3],
+/// HashSet::from(vec![1, 2, 3])
+/// );
+/// # }
+/// ```
+#[macro_export]
+macro_rules! hashset {
+ () => { $crate::hashset::HashSet::new() };
+
+ ( $($x:expr),* ) => {{
+ let mut l = $crate::hashset::HashSet::new();
+ $(
+ l.insert($x);
+ )*
+ l
+ }};
+
+ ( $($x:expr ,)* ) => {{
+ let mut l = $crate::hashset::HashSet::new();
+ $(
+ l.insert($x);
+ )*
+ l
+ }};
+}
+
+def_pool!(HashSetPool<A>, Node<Value<A>>);
+
+/// An unordered set.
+///
+/// An immutable hash set using [hash array mapped tries][1].
+///
+/// Most operations on this set are O(log<sub>x</sub> n) with a high
+/// enough *x* that they are effectively O(1) for most sets.
+/// Because of this, it's a great choice for a generic set as long as
+/// you don't mind that values will need to implement
+/// [`Hash`][std::hash::Hash] and [`Eq`][std::cmp::Eq].
+///
+/// Values will have a predictable order based on the hasher
+/// being used. Unless otherwise specified, this will be the standard
+/// [`RandomState`][std::collections::hash_map::RandomState] hasher.
+///
+/// [1]: https://en.wikipedia.org/wiki/Hash_array_mapped_trie
+/// [std::cmp::Eq]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [std::hash::Hash]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [std::collections::hash_map::RandomState]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+pub struct HashSet<A, S = RandomState> {
+ hasher: Ref<S>,
+ pool: HashSetPool<A>,
+ root: PoolRef<Node<Value<A>>>,
+ size: usize,
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
+struct Value<A>(A);
+
+impl<A> Deref for Value<A> {
+ type Target = A;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+// FIXME: lacking specialisation, we can't simply implement `HashValue`
+// for `A`, so we have to use the `Value<A>` indirection.
+impl<A> HashValue for Value<A>
+where
+ A: Hash + Eq,
+{
+ type Key = A;
+
+ fn extract_key(&self) -> &Self::Key {
+ &self.0
+ }
+
+ fn ptr_eq(&self, _other: &Self) -> bool {
+ false
+ }
+}
+
+impl<A> HashSet<A, RandomState> {
+ /// Construct an empty set.
+ #[must_use]
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ /// Construct an empty set using a specific memory pool.
+ #[cfg(feature = "pool")]
+ #[must_use]
+ pub fn with_pool(pool: &HashSetPool<A>) -> Self {
+ Self {
+ pool: pool.clone(),
+ hasher: Default::default(),
+ size: 0,
+ root: PoolRef::default(&pool.0),
+ }
+ }
+}
+
+impl<A> HashSet<A, RandomState>
+where
+ A: Hash + Eq + Clone,
+{
+ /// Construct a set with a single value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// # use std::sync::Arc;
+ /// let set = HashSet::unit(123);
+ /// assert!(set.contains(&123));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn unit(a: A) -> Self {
+ HashSet::new().update(a)
+ }
+}
+
+impl<A, S> HashSet<A, S> {
+ /// Test whether a set is empty.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// assert!(
+ /// !hashset![1, 2, 3].is_empty()
+ /// );
+ /// assert!(
+ /// HashSet::<i32>::new().is_empty()
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Get the size of a set.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// assert_eq!(3, hashset![1, 2, 3].len());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.size
+ }
+
+ /// Test whether two sets refer to the same content in memory.
+ ///
+ /// This is true if the two sides are references to the same set,
+ /// or if the two sets refer to the same root node.
+ ///
+ /// This would return true if you're comparing a set to itself, or
+ /// if you're comparing a set to a fresh clone of itself.
+ ///
+ /// Time: O(1)
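+    ///
+    /// # Examples
+    ///
+    /// A minimal illustration: a fresh clone shares its root node with
+    /// the original, while an independently built set does not.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let set = hashset![1, 2, 3];
+    /// let copy = set.clone();
+    /// assert!(set.ptr_eq(&copy));
+    ///
+    /// let rebuilt = hashset![1, 2, 3];
+    /// assert!(!set.ptr_eq(&rebuilt));
+    /// ```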
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ std::ptr::eq(self, other) || PoolRef::ptr_eq(&self.root, &other.root)
+ }
+
+ /// Get a reference to the memory pool used by this set.
+ ///
+ /// Note that if you didn't specifically construct it with a pool, you'll
+ /// get back a reference to a pool of size 0.
+ #[cfg(feature = "pool")]
+ pub fn pool(&self) -> &HashSetPool<A> {
+ &self.pool
+ }
+
+ /// Construct an empty hash set using the provided hasher.
+ #[inline]
+ #[must_use]
+ pub fn with_hasher<RS>(hasher: RS) -> Self
+ where
+ Ref<S>: From<RS>,
+ {
+ let pool = HashSetPool::default();
+ let root = PoolRef::default(&pool.0);
+ HashSet {
+ size: 0,
+ pool,
+ root,
+ hasher: From::from(hasher),
+ }
+ }
+
+ /// Construct an empty hash set using the provided memory pool and hasher.
+ #[cfg(feature = "pool")]
+ #[inline]
+ #[must_use]
+ pub fn with_pool_hasher<RS>(pool: &HashSetPool<A>, hasher: RS) -> Self
+ where
+ Ref<S>: From<RS>,
+ {
+ let root = PoolRef::default(&pool.0);
+ HashSet {
+ size: 0,
+ pool: pool.clone(),
+ root,
+ hasher: From::from(hasher),
+ }
+ }
+
+ /// Get a reference to the set's [`BuildHasher`][BuildHasher].
+ ///
+ /// [BuildHasher]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+ #[must_use]
+ pub fn hasher(&self) -> &Ref<S> {
+ &self.hasher
+ }
+
+ /// Construct an empty hash set using the same hasher as the current hash set.
+ #[inline]
+ #[must_use]
+ pub fn new_from<A1>(&self) -> HashSet<A1, S>
+ where
+ A1: Hash + Eq + Clone,
+ {
+ let pool = HashSetPool::default();
+ let root = PoolRef::default(&pool.0);
+ HashSet {
+ size: 0,
+ pool,
+ root,
+ hasher: self.hasher.clone(),
+ }
+ }
+
+ /// Discard all elements from the set.
+ ///
+ /// This leaves you with an empty set, and all elements that
+ /// were previously inside it are dropped.
+ ///
+ /// Time: O(n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::HashSet;
+ /// let mut set = hashset![1, 2, 3];
+ /// set.clear();
+ /// assert!(set.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ if !self.is_empty() {
+ self.root = PoolRef::default(&self.pool.0);
+ self.size = 0;
+ }
+ }
+
+ /// Get an iterator over the values in a hash set.
+ ///
+ /// Please note that the order is consistent between sets using
+ /// the same hasher, but no other ordering guarantee is offered.
+ /// Items will not come out in insertion order or sort order.
+ /// They will, however, come out in the same order every time for
+ /// the same set.
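+    ///
+    /// # Examples
+    ///
+    /// A small sketch of that guarantee: iterating the same set twice
+    /// yields the same sequence both times.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let set = hashset![1, 2, 3, 4, 5];
+    /// let first: Vec<_> = set.iter().collect();
+    /// let second: Vec<_> = set.iter().collect();
+    /// assert_eq!(first, second);
+    /// ```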
+ #[must_use]
+ pub fn iter(&self) -> Iter<'_, A> {
+ Iter {
+ it: NodeIter::new(&self.root, self.size),
+ }
+ }
+}
+
+impl<A, S> HashSet<A, S>
+where
+ A: Hash + Eq,
+ S: BuildHasher,
+{
+ fn test_eq(&self, other: &Self) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+ let mut seen = collections::HashSet::new();
+ for value in self.iter() {
+ if !other.contains(value) {
+ return false;
+ }
+ seen.insert(value);
+ }
+ for value in other.iter() {
+ if !seen.contains(&value) {
+ return false;
+ }
+ }
+ true
+ }
+
+ /// Test if a value is part of a set.
+ ///
+ /// Time: O(log n)
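+    ///
+    /// # Examples
+    ///
+    /// A quick check against a small set:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let set = hashset![1, 2, 3];
+    /// assert!(set.contains(&2));
+    /// assert!(!set.contains(&4));
+    /// ```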
+ #[must_use]
+ pub fn contains<BA>(&self, a: &BA) -> bool
+ where
+ BA: Hash + Eq + ?Sized,
+ A: Borrow<BA>,
+ {
+ self.root.get(hash_key(&*self.hasher, a), 0, a).is_some()
+ }
+
+ /// Test whether a set is a subset of another set, meaning that
+ /// all values in our set must also be in the other set.
+ ///
+ /// Time: O(n log n)
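+    ///
+    /// # Examples
+    ///
+    /// A small sketch with two overlapping sets:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let set1 = hashset![1, 2];
+    /// let set2 = hashset![1, 2, 3];
+    /// assert!(set1.is_subset(&set2));
+    /// assert!(!set2.is_subset(&set1));
+    /// ```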
+ #[must_use]
+ pub fn is_subset<RS>(&self, other: RS) -> bool
+ where
+ RS: Borrow<Self>,
+ {
+ let o = other.borrow();
+ self.iter().all(|a| o.contains(a))
+ }
+
+ /// Test whether a set is a proper subset of another set, meaning
+ /// that all values in our set must also be in the other set. A
+ /// proper subset must also be smaller than the other set.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn is_proper_subset<RS>(&self, other: RS) -> bool
+ where
+ RS: Borrow<Self>,
+ {
+ self.len() != other.borrow().len() && self.is_subset(other)
+ }
+}
+
+impl<A, S> HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher,
+{
+ /// Insert a value into a set.
+ ///
+ /// Time: O(log n)
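+    ///
+    /// # Examples
+    ///
+    /// A short sketch; if the set already contained an equal value,
+    /// that previous value is returned.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let mut set = hashset![1, 2];
+    /// assert_eq!(None, set.insert(3));
+    /// assert_eq!(Some(3), set.insert(3));
+    /// assert_eq!(hashset![1, 2, 3], set);
+    /// ```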
+ #[inline]
+ pub fn insert(&mut self, a: A) -> Option<A> {
+ let hash = hash_key(&*self.hasher, &a);
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.insert(&self.pool.0, hash, 0, Value(a)) {
+ None => {
+ self.size += 1;
+ None
+ }
+ Some(Value(old_value)) => Some(old_value),
+ }
+ }
+
+ /// Remove a value from a set if it exists.
+ ///
+ /// Time: O(log n)
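+    ///
+    /// # Examples
+    ///
+    /// A short sketch; the removed value is returned if it was present.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let mut set = hashset![1, 2, 3];
+    /// assert_eq!(Some(2), set.remove(&2));
+    /// assert_eq!(None, set.remove(&2));
+    /// assert_eq!(hashset![1, 3], set);
+    /// ```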
+ pub fn remove<BA>(&mut self, a: &BA) -> Option<A>
+ where
+ BA: Hash + Eq + ?Sized,
+ A: Borrow<BA>,
+ {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ let result = root.remove(&self.pool.0, hash_key(&*self.hasher, a), 0, a);
+ if result.is_some() {
+ self.size -= 1;
+ }
+ result.map(|v| v.0)
+ }
+
+ /// Construct a new set from the current set with the given value
+ /// added.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// # use std::sync::Arc;
+ /// let set = hashset![123];
+ /// assert_eq!(
+ /// set.update(456),
+ /// hashset![123, 456]
+ /// );
+ /// ```
+ #[must_use]
+ pub fn update(&self, a: A) -> Self {
+ let mut out = self.clone();
+ out.insert(a);
+ out
+ }
+
+ /// Construct a new set with the given value removed if it's in
+ /// the set.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn without<BA>(&self, a: &BA) -> Self
+ where
+ BA: Hash + Eq + ?Sized,
+ A: Borrow<BA>,
+ {
+ let mut out = self.clone();
+ out.remove(a);
+ out
+ }
+
+ /// Filter out values from a set which don't satisfy a predicate.
+ ///
+ /// This is slightly more efficient than filtering using an
+ /// iterator, in that it doesn't need to rehash the retained
+ /// values, but it still needs to reconstruct the entire tree
+ /// structure of the set.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::HashSet;
+ /// let mut set = hashset![1, 2, 3];
+ /// set.retain(|v| *v > 1);
+ /// let expected = hashset![2, 3];
+ /// assert_eq!(expected, set);
+ /// ```
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&A) -> bool,
+ {
+ let old_root = self.root.clone();
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ for (value, hash) in NodeIter::new(&old_root, self.size) {
+ if !f(value) && root.remove(&self.pool.0, hash, 0, value).is_some() {
+ self.size -= 1;
+ }
+ }
+ }
+
+ /// Construct the union of two sets.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// let set1 = hashset!{1, 2};
+ /// let set2 = hashset!{2, 3};
+ /// let expected = hashset!{1, 2, 3};
+ /// assert_eq!(expected, set1.union(set2));
+ /// ```
+ #[must_use]
+ pub fn union(self, other: Self) -> Self {
+ let (mut to_mutate, to_consume) = if self.len() >= other.len() {
+ (self, other)
+ } else {
+ (other, self)
+ };
+ for value in to_consume {
+ to_mutate.insert(value);
+ }
+ to_mutate
+ }
+
+ /// Construct the union of multiple sets.
+ ///
+ /// Time: O(n log n)
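+    ///
+    /// # Examples
+    ///
+    /// A small sketch folding a `Vec` of sets into one:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let sets = vec![hashset!{1, 2}, hashset!{2, 3}, hashset!{3, 4}];
+    /// assert_eq!(hashset!{1, 2, 3, 4}, HashSet::unions(sets));
+    /// ```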
+ #[must_use]
+ pub fn unions<I>(i: I) -> Self
+ where
+ I: IntoIterator<Item = Self>,
+ S: Default,
+ {
+ i.into_iter().fold(Self::default(), Self::union)
+ }
+
+ /// Construct the symmetric difference between two sets.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference`][symmetric_difference] method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// let set1 = hashset!{1, 2};
+ /// let set2 = hashset!{2, 3};
+ /// let expected = hashset!{1, 3};
+ /// assert_eq!(expected, set1.difference(set2));
+ /// ```
+ ///
+ /// [symmetric_difference]: #method.symmetric_difference
+ #[must_use]
+ pub fn difference(self, other: Self) -> Self {
+ self.symmetric_difference(other)
+ }
+
+ /// Construct the symmetric difference between two sets.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// let set1 = hashset!{1, 2};
+ /// let set2 = hashset!{2, 3};
+ /// let expected = hashset!{1, 3};
+ /// assert_eq!(expected, set1.symmetric_difference(set2));
+ /// ```
+ #[must_use]
+ pub fn symmetric_difference(mut self, other: Self) -> Self {
+ for value in other {
+ if self.remove(&value).is_none() {
+ self.insert(value);
+ }
+ }
+ self
+ }
+
+ /// Construct the relative complement between two sets, that is the set
+ /// of values in `self` that do not occur in `other`.
+ ///
+ /// Time: O(m log n) where m is the size of the other set
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::hashset::HashSet;
+    /// let set1 = hashset!{1, 2};
+    /// let set2 = hashset!{2, 3};
+    /// let expected = hashset!{1};
+ /// assert_eq!(expected, set1.relative_complement(set2));
+ /// ```
+ #[must_use]
+ pub fn relative_complement(mut self, other: Self) -> Self {
+ for value in other {
+ let _ = self.remove(&value);
+ }
+ self
+ }
+
+ /// Construct the intersection of two sets.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::hashset::HashSet;
+ /// let set1 = hashset!{1, 2};
+ /// let set2 = hashset!{2, 3};
+ /// let expected = hashset!{2};
+ /// assert_eq!(expected, set1.intersection(set2));
+ /// ```
+ #[must_use]
+ pub fn intersection(self, other: Self) -> Self {
+ let mut out = self.new_from();
+ for value in other {
+ if self.contains(&value) {
+ out.insert(value);
+ }
+ }
+ out
+ }
+}
+
+// Core traits
+
+impl<A, S> Clone for HashSet<A, S>
+where
+ A: Clone,
+{
+ /// Clone a set.
+ ///
+ /// Time: O(1)
+ #[inline]
+ fn clone(&self) -> Self {
+ HashSet {
+ hasher: self.hasher.clone(),
+ pool: self.pool.clone(),
+ root: self.root.clone(),
+ size: self.size,
+ }
+ }
+}
+
+impl<A, S> PartialEq for HashSet<A, S>
+where
+ A: Hash + Eq,
+ S: BuildHasher + Default,
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.test_eq(other)
+ }
+}
+
+impl<A, S> Eq for HashSet<A, S>
+where
+ A: Hash + Eq,
+ S: BuildHasher + Default,
+{
+}
+
+impl<A, S> PartialOrd for HashSet<A, S>
+where
+ A: Hash + Eq + Clone + PartialOrd,
+ S: BuildHasher + Default,
+{
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ if Ref::ptr_eq(&self.hasher, &other.hasher) {
+ return self.iter().partial_cmp(other.iter());
+ }
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+impl<A, S> Ord for HashSet<A, S>
+where
+ A: Hash + Eq + Clone + Ord,
+ S: BuildHasher + Default,
+{
+ fn cmp(&self, other: &Self) -> Ordering {
+ if Ref::ptr_eq(&self.hasher, &other.hasher) {
+ return self.iter().cmp(other.iter());
+ }
+ self.iter().cmp(other.iter())
+ }
+}
+
+impl<A, S> Hash for HashSet<A, S>
+where
+ A: Hash + Eq,
+ S: BuildHasher + Default,
+{
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ for i in self.iter() {
+ i.hash(state);
+ }
+ }
+}
+
+impl<A, S> Default for HashSet<A, S>
+where
+ S: BuildHasher + Default,
+{
+ fn default() -> Self {
+ let pool = HashSetPool::default();
+ let root = PoolRef::default(&pool.0);
+ HashSet {
+ hasher: Ref::<S>::default(),
+ pool,
+ root,
+ size: 0,
+ }
+ }
+}
+
+impl<A, S> Add for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher,
+{
+ type Output = HashSet<A, S>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.union(other)
+ }
+}
+
+impl<A, S> Mul for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher,
+{
+ type Output = HashSet<A, S>;
+
+ fn mul(self, other: Self) -> Self::Output {
+ self.intersection(other)
+ }
+}
+
+impl<'a, A, S> Add for &'a HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher,
+{
+ type Output = HashSet<A, S>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.clone().union(other.clone())
+ }
+}
+
+impl<'a, A, S> Mul for &'a HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher,
+{
+ type Output = HashSet<A, S>;
+
+ fn mul(self, other: Self) -> Self::Output {
+ self.clone().intersection(other.clone())
+ }
+}
+
+impl<A, S> Sum for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn sum<I>(it: I) -> Self
+ where
+ I: Iterator<Item = Self>,
+ {
+ it.fold(Self::default(), |a, b| a + b)
+ }
+}
+
+impl<A, S, R> Extend<R> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone + From<R>,
+ S: BuildHasher,
+{
+ fn extend<I>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = R>,
+ {
+ for value in iter {
+ self.insert(From::from(value));
+ }
+ }
+}
+
+#[cfg(not(has_specialisation))]
+impl<A, S> Debug for HashSet<A, S>
+where
+ A: Hash + Eq + Debug,
+ S: BuildHasher,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<A, S> Debug for HashSet<A, S>
+where
+ A: Hash + Eq + Debug,
+ S: BuildHasher,
+{
+ default fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<A, S> Debug for HashSet<A, S>
+where
+ A: Hash + Eq + Debug + Ord,
+ S: BuildHasher,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+// Iterators
+
+/// An iterator over the elements of a set.
+pub struct Iter<'a, A> {
+ it: NodeIter<'a, Value<A>>,
+}
+
+impl<'a, A> Iterator for Iter<'a, A>
+where
+ A: 'a,
+{
+ type Item = &'a A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|(v, _)| &v.0)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, A> ExactSizeIterator for Iter<'a, A> {}
+
+impl<'a, A> FusedIterator for Iter<'a, A> {}
+
+/// A consuming iterator over the elements of a set.
+pub struct ConsumingIter<A>
+where
+ A: Hash + Eq + Clone,
+{
+ it: NodeDrain<Value<A>>,
+}
+
+impl<A> Iterator for ConsumingIter<A>
+where
+ A: Hash + Eq + Clone,
+{
+ type Item = A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|(v, _)| v.0)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<A> ExactSizeIterator for ConsumingIter<A> where A: Hash + Eq + Clone {}
+
+impl<A> FusedIterator for ConsumingIter<A> where A: Hash + Eq + Clone {}
+
+// Iterator conversions
+
+impl<A, RA, S> FromIterator<RA> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone + From<RA>,
+ S: BuildHasher + Default,
+{
+ fn from_iter<T>(i: T) -> Self
+ where
+ T: IntoIterator<Item = RA>,
+ {
+ let mut set = Self::default();
+ for value in i {
+ set.insert(From::from(value));
+ }
+ set
+ }
+}
+
+impl<'a, A, S> IntoIterator for &'a HashSet<A, S>
+where
+ A: Hash + Eq,
+ S: BuildHasher,
+{
+ type Item = &'a A;
+ type IntoIter = Iter<'a, A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<A, S> IntoIterator for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher,
+{
+ type Item = A;
+ type IntoIter = ConsumingIter<Self::Item>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ ConsumingIter {
+ it: NodeDrain::new(&self.pool.0, self.root, self.size),
+ }
+ }
+}
+
+// Conversions
+
+impl<'s, 'a, A, OA, SA, SB> From<&'s HashSet<&'a A, SA>> for HashSet<OA, SB>
+where
+ A: ToOwned<Owned = OA> + Hash + Eq + ?Sized,
+ OA: Borrow<A> + Hash + Eq + Clone,
+ SA: BuildHasher,
+ SB: BuildHasher + Default,
+{
+ fn from(set: &HashSet<&A, SA>) -> Self {
+ set.iter().map(|a| (*a).to_owned()).collect()
+ }
+}
+
+impl<'a, A, S> From<&'a [A]> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(slice: &'a [A]) -> Self {
+ slice.iter().cloned().collect()
+ }
+}
+
+impl<A, S> From<Vec<A>> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(vec: Vec<A>) -> Self {
+ vec.into_iter().collect()
+ }
+}
+
+impl<'a, A, S> From<&'a Vec<A>> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(vec: &Vec<A>) -> Self {
+ vec.iter().cloned().collect()
+ }
+}
+
+impl<A, S> From<Vector<A>> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(vector: Vector<A>) -> Self {
+ vector.into_iter().collect()
+ }
+}
+
+impl<'a, A, S> From<&'a Vector<A>> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(vector: &Vector<A>) -> Self {
+ vector.iter().cloned().collect()
+ }
+}
+
+impl<A, S> From<collections::HashSet<A>> for HashSet<A, S>
+where
+ A: Eq + Hash + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(hash_set: collections::HashSet<A>) -> Self {
+ hash_set.into_iter().collect()
+ }
+}
+
+impl<'a, A, S> From<&'a collections::HashSet<A>> for HashSet<A, S>
+where
+ A: Eq + Hash + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(hash_set: &collections::HashSet<A>) -> Self {
+ hash_set.iter().cloned().collect()
+ }
+}
+
+impl<'a, A, S> From<&'a BTreeSet<A>> for HashSet<A, S>
+where
+ A: Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(btree_set: &BTreeSet<A>) -> Self {
+ btree_set.iter().cloned().collect()
+ }
+}
+
+impl<A, S> From<OrdSet<A>> for HashSet<A, S>
+where
+ A: Ord + Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(ordset: OrdSet<A>) -> Self {
+ ordset.into_iter().collect()
+ }
+}
+
+impl<'a, A, S> From<&'a OrdSet<A>> for HashSet<A, S>
+where
+ A: Ord + Hash + Eq + Clone,
+ S: BuildHasher + Default,
+{
+ fn from(ordset: &OrdSet<A>) -> Self {
+ ordset.into_iter().cloned().collect()
+ }
+}
+
+// Proptest
+#[cfg(any(test, feature = "proptest"))]
+#[doc(hidden)]
+pub mod proptest {
+ #[deprecated(
+ since = "14.3.0",
+ note = "proptest strategies have moved to im::proptest"
+ )]
+ pub use crate::proptest::hash_set;
+}
+
+#[cfg(test)]
+mod test {
+ use super::proptest::*;
+ use super::*;
+ use crate::test::LolHasher;
+ use ::proptest::num::i16;
+ use ::proptest::proptest;
+ use std::hash::BuildHasherDefault;
+
+ #[test]
+ fn insert_failing() {
+ let mut set: HashSet<i16, BuildHasherDefault<LolHasher>> = Default::default();
+ set.insert(14658);
+ assert_eq!(1, set.len());
+ set.insert(-19198);
+ assert_eq!(2, set.len());
+ }
+
+ #[test]
+ fn match_strings_with_string_slices() {
+ let mut set: HashSet<String> = From::from(&hashset!["foo", "bar"]);
+ set = set.without("bar");
+ assert!(!set.contains("bar"));
+ set.remove("foo");
+ assert!(!set.contains("foo"));
+ }
+
+ #[test]
+ fn macro_allows_trailing_comma() {
+ let set1 = hashset! {"foo", "bar"};
+ let set2 = hashset! {
+ "foo",
+ "bar",
+ };
+ assert_eq!(set1, set2);
+ }
+
+ #[test]
+ fn issue_60_drain_iterator_memory_corruption() {
+ use crate::test::MetroHashBuilder;
+ for i in 0..1000 {
+ let mut lhs = vec![0, 1, 2];
+ lhs.sort_unstable();
+
+ let hasher = Ref::from(MetroHashBuilder::new(i));
+ let mut iset: HashSet<_, MetroHashBuilder> = HashSet::with_hasher(hasher.clone());
+ for &i in &lhs {
+ iset.insert(i);
+ }
+
+ let mut rhs: Vec<_> = iset.clone().into_iter().collect();
+ rhs.sort_unstable();
+
+ if lhs != rhs {
+ println!("iteration: {}", i);
+ println!("seed: {}", hasher.seed());
+ println!("lhs: {}: {:?}", lhs.len(), &lhs);
+ println!("rhs: {}: {:?}", rhs.len(), &rhs);
+ panic!();
+ }
+ }
+ }
+
+ proptest! {
+ #[test]
+ fn proptest_a_set(ref s in hash_set(".*", 10..100)) {
+ assert!(s.len() < 100);
+ assert!(s.len() >= 10);
+ }
+ }
+}
diff --git a/vendor/im-rc/src/iter.rs b/vendor/im-rc/src/iter.rs
new file mode 100644
index 000000000..2327b8dd5
--- /dev/null
+++ b/vendor/im-rc/src/iter.rs
@@ -0,0 +1,42 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! Iterators over immutable data.
+
+/// Create an iterator of values using a function to update an owned state
+/// value.
+///
+/// The function is called with the current state as its argument, and should
+/// return an [`Option`][std::option::Option] of a tuple of the next value to
+/// yield from the iterator and the updated state. If the function returns
+/// [`None`][std::option::Option::None], the iterator ends.
+///
+/// # Examples
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::iter::unfold;
+/// # use im::vector::Vector;
+/// # use std::iter::FromIterator;
+/// // Create an infinite stream of numbers, starting at 0.
+/// let mut it = unfold(0, |i| Some((i, i + 1)));
+///
+/// // Make a list out of its first five elements.
+/// let numbers = Vector::from_iter(it.take(5));
+/// assert_eq!(numbers, vector![0, 1, 2, 3, 4]);
+/// ```
+///
+/// [std::option::Option]: https://doc.rust-lang.org/std/option/enum.Option.html
+/// [std::option::Option::None]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
+pub fn unfold<F, S, A>(value: S, f: F) -> impl Iterator<Item = A>
+where
+ F: Fn(S) -> Option<(A, S)>,
+{
+ let mut value = Some(value);
+ std::iter::from_fn(move || {
+ f(value.take().unwrap()).map(|(next, state)| {
+ value = Some(state);
+ next
+ })
+ })
+}
diff --git a/vendor/im-rc/src/lib.rs b/vendor/im-rc/src/lib.rs
new file mode 100644
index 000000000..fa7a0a9eb
--- /dev/null
+++ b/vendor/im-rc/src/lib.rs
@@ -0,0 +1,507 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! # Immutable Data Structures for Rust
+//!
+//! This library implements several of the more commonly useful immutable data
+//! structures for Rust.
+//!
+//! ## What are immutable data structures?
+//!
+//! Immutable data structures are data structures which can be copied and
+//! modified efficiently without altering the original. The most uncomplicated
+//! example of this is the venerable [cons list][cons-list]. This crate offers a
+//! selection of more modern and flexible data structures with similar
+//! properties, tuned for the needs of Rust developers.
+//!
+//! Briefly, the following data structures are provided:
+//!
+//! * [Vectors][vector::Vector] based on [RRB trees][rrb-tree]
+//! * [Hash maps][hashmap::HashMap]/[sets][hashset::HashSet] based on [hash
+//! array mapped tries][hamt]
+//! * [Ordered maps][ordmap::OrdMap]/[sets][ordset::OrdSet] based on
+//! [B-trees][b-tree]
+//!
+//! ## Why Would I Want This?
+//!
+//! While immutable data structures can be a game changer for other
+//! programming languages, the most obvious benefit - avoiding the
+//! accidental mutation of data - is already handled so well by Rust's
+//! type system that it's just not something a Rust programmer needs
+//! to worry about even when using data structures that would send a
+//! conscientious Clojure programmer into a panic.
+//!
+//! Immutable data structures offer other benefits, though, some of
+//! which are useful even in a language like Rust. The most prominent
+//! is *structural sharing*, which means that if two data structures
+//! are mostly copies of each other, most of the memory they take up
+//! will be shared between them. This implies that making copies of an
+//! immutable data structure is cheap: it's really only a matter of
+//! copying a pointer and increasing a reference counter, where in the
+//! case of [`Vec`][std::vec::Vec] you have to allocate the same
+//! amount of memory all over again and make a copy of every element
+//! it contains. For immutable data structures, extra memory isn't
+//! allocated until you modify either the copy or the original, and
+//! then only the memory needed to record the difference.
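+//!
+//! As a rough sketch of the programming model (using this crate's
+//! `vector!` macro and [`Vector`][vector::Vector] type):
+//!
+//! ```
+//! # #[macro_use] extern crate im_rc as im;
+//! let original = vector![1, 2, 3];
+//! let mut copy = original.clone();
+//! copy.push_back(4);
+//! // The original is untouched; only the data for the change is new.
+//! assert_eq!(vector![1, 2, 3], original);
+//! assert_eq!(vector![1, 2, 3, 4], copy);
+//! ```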
+//!
+//! Another goal of this library has been the idea that you shouldn't
+//! even have to think about what data structure to use in any given
+//! situation, until the point where you need to start worrying about
+//! optimisation - which, in practice, often never comes. Beyond the
+//! shape of your data (i.e. whether to use a list or a map), it should
+//! be fine not to think too carefully about data structures - you can
+//! just pick the one that has the right shape and it should have
+//! acceptable performance characteristics for every operation you
+//! might need. Specialised data structures will always be faster at
+//! what they've been specialised for, but `im` aims to provide the
+//! data structures that are least likely to be accidentally used for
+//! the wrong thing.
+//!
+//! For instance, [`Vec`][std::vec::Vec] beats everything at memory
+//! usage, indexing and operations that happen at the back of the
+//! list, but is terrible at insertion and removal, and gets worse the
+//! closer to the front of the list you get.
+//! [`VecDeque`][std::collections::VecDeque] adds a little bit of
+//! complexity in order to make operations at the front as efficient
+//! as operations at the back, but is still bad at insertion and
+//! especially concatenation. [`Vector`][vector::Vector] adds another
+//! bit of complexity, and could never match [`Vec`][std::vec::Vec] at
+//! what it's best at, but in return every operation you can throw at
+//! it can be completed in a reasonable amount of time - even normally
+//! expensive operations like copying and especially concatenation are
+//! reasonably cheap when using a [`Vector`][vector::Vector].
+//!
+//! It should be noted, however, that because of its simplicity,
+//! [`Vec`][std::vec::Vec] actually beats [`Vector`][vector::Vector] even at its
+//! strongest operations at small sizes, just because modern CPUs are
+//! hyperoptimised for things like copying small chunks of contiguous memory -
+//! you actually need to go past a certain size (usually in the vicinity of
+//! several hundred elements) before you get to the point where
+//! [`Vec`][std::vec::Vec] isn't always going to be the fastest choice.
+//! [`Vector`][vector::Vector] attempts to overcome this by actually just being
+//! an array at very small sizes, and being able to switch efficiently to the
+//! full data structure when it grows large enough. Thus,
+//! [`Vector`][vector::Vector] will actually be equivalent to
+//! [`Vec`][std::vec::Vec] until it grows past the size of a single chunk.
+//!
+//! The maps - [`HashMap`][hashmap::HashMap] and
+//! [`OrdMap`][ordmap::OrdMap] - generally perform similarly to their
+//! equivalents in the standard library, but tend to run a bit slower
+//! on the basic operations ([`HashMap`][hashmap::HashMap] is almost
+//! neck and neck with its counterpart, while
+//! [`OrdMap`][ordmap::OrdMap] currently tends to run 2-3x slower). On
+//! the other hand, they offer the cheap copy and structural sharing
+//! between copies that you'd expect from immutable data structures.
+//!
+//! In conclusion, the aim of this library is to provide a safe
+//! default choice for the most common kinds of data structures,
+//! allowing you to defer careful thinking about the right data
+//! structure for the job until you need to start looking for
+//! optimisations - and you may find, especially for larger data sets,
+//! that immutable data structures are still the right choice.
+//!
+//! ## Values
+//!
+//! Because we need to make copies of shared nodes in these data structures
+//! before updating them, the values you store in them must implement
+//! [`Clone`][std::clone::Clone]. For primitive values that implement
+//! [`Copy`][std::marker::Copy], such as numbers, everything is fine: this is
+//! the case for which the data structures are optimised, and performance is
+//! going to be great.
+//!
+//! On the other hand, if you want to store values for which cloning is
+//! expensive, or values that don't implement [`Clone`][std::clone::Clone], you
+//! need to wrap them in [`Rc`][std::rc::Rc] or [`Arc`][std::sync::Arc]. Thus,
+//! if you have a complex structure `BigBlobOfData` and you want to store a list
+//! of them as a `Vector<BigBlobOfData>`, you should instead use a
+//! `Vector<Rc<BigBlobOfData>>`, which is going to save you not only the time
+//! spent cloning the big blobs of data, but also the memory spent keeping
+//! multiple copies of it around, as [`Rc`][std::rc::Rc] keeps a single
+//! reference counted copy around instead.
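+//!
+//! A hypothetical sketch of that pattern, where `BigBlobOfData` stands
+//! in for any type that is expensive (or impossible) to clone:
+//!
+//! ```
+//! # #[macro_use] extern crate im_rc as im;
+//! # use im::vector::Vector;
+//! use std::rc::Rc;
+//!
+//! struct BigBlobOfData {
+//!     payload: Vec<u8>,
+//! }
+//!
+//! let blob = Rc::new(BigBlobOfData { payload: vec![0; 1024] });
+//! let mut list: Vector<Rc<BigBlobOfData>> = Vector::new();
+//! // Cloning the `Rc` only bumps a reference count; the blob itself is
+//! // never copied.
+//! list.push_back(blob.clone());
+//! ```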
+//!
+//! If you're storing smaller values that aren't
+//! [`Copy`][std::marker::Copy]able, you'll need to exercise judgement: if your
+//! values are going to be very cheap to clone, as would be the case for short
+//! [`String`][std::string::String]s or small [`Vec`][std::vec::Vec]s, you're
+//! probably better off storing them directly without wrapping them in an
+//! [`Rc`][std::rc::Rc], because, like the [`Rc`][std::rc::Rc], they're just
+//! pointers to some data on the heap, and that data isn't expensive to clone -
+//! you might actually lose more performance from the extra redirection of
+//! wrapping them in an [`Rc`][std::rc::Rc] than you would from occasionally
+//! cloning them.
+//!
+//! ### When does cloning happen?
+//!
+//! So when will your values actually be cloned? The easy answer is only if you
+//! [`clone`][std::clone::Clone::clone] the data structure itself, and then only
+//! lazily as you change it. Values are stored in tree nodes inside the data
+//! structure, each node of which contains up to 64 values. When you
+//! [`clone`][std::clone::Clone::clone] a data structure, nothing is actually
+//! copied - it's just the reference count on the root node that's incremented,
+//! to indicate that it's shared between two data structures. It's only when you
+//! actually modify one of the shared data structures that nodes are cloned:
+//! when you make a change somewhere in the tree, the node containing the change
+//! needs to be cloned, and then its parent nodes need to be updated to contain
+//! the new child node instead of the old version, and so they're cloned as
+//! well.
+//!
+//! We can call this "lazy" cloning - if you make two copies of a data structure
+//! and you never change either of them, there's never any need to clone the
+//! data they contain. It's only when you start making changes that cloning
+//! starts to happen, and then only on the specific tree nodes that are part of
+//! the change. Note that the implications of lazily cloning the data structure
+//! extend to memory usage as well as the CPU workload of copying the data
+//! around - cloning an immutable data structure means both copies share the
+//! same allocated memory, until you start making changes.
+//!
+//! Most crucially, if you never clone the data structure, the data inside it is
+//! also never cloned, and in this case it acts just like a mutable data
+//! structure, with minimal performance differences (but still non-zero, as we
+//! still have to check for shared nodes).
+//!
+//! ## Data Structures
+//!
+//! We'll attempt to provide a comprehensive guide to the available
+//! data structures below.
+//!
+//! ### Performance Notes
+//!
+//! "Big O notation" is the standard way of talking about the time
+//! complexity of data structure operations. If you're not familiar
+//! with big O notation, here's a quick cheat sheet:
+//!
+//! *O(1)* means an operation runs in constant time: it will take the
+//! same time to complete regardless of the size of the data
+//! structure.
+//!
+//! *O(n)* means an operation runs in linear time: if you double the
+//! size of your data structure, the operation will take twice as long
+//! to complete; if you quadruple the size, it will take four times as
+//! long, etc.
+//!
+//! *O(log n)* means an operation runs in logarithmic time: for
+//! *log<sub>2</sub>*, if you double the size of your data structure,
+//! the operation will take one step longer to complete; if you
+//! quadruple the size, it will need two steps more; and so on.
+//! However, the data structures in this library generally run in
+//! *log<sub>64</sub>* time, meaning you have to make your data
+//! structure 64 times bigger to need one extra step, and 4096 times
+//! bigger to need two steps. This means that, while they still count
+//! as O(log n), operations on all but really large data sets will run
+//! at near enough to O(1) that you won't usually notice.
+//!
+//! *O(n log n)* is the most expensive operation you'll see in this
+//! library: it means that for every one of the *n* elements in your
+//! data structure, you have to perform *log n* operations. In our
+//! case, as noted above, this is often close enough to O(n) that it's
+//! not usually as bad as it sounds, but even O(n) isn't cheap and the
+//! cost still increases logarithmically, if slowly, as the size of
+//! your data increases. O(n log n) basically means "are you sure you
+//! need to do this?"
+//!
+//! *O(1)\** means 'amortised O(1),' which means that an operation
+//! usually runs in constant time but will occasionally be more
+//! expensive: for instance,
+//! [`Vector::push_back`][vector::Vector::push_back], if called in
+//! sequence, will be O(1) most of the time but every 64th time it
+//! will be O(log n), as it fills up its tail chunk and needs to
+//! insert it into the tree. Please note that the O(1) with the
+//! asterisk attached is not a common notation; it's just a convention
+//! I've used in these docs to save myself from having to type
+//! 'amortised' everywhere.
+//!
+//! ### Lists
+//!
+//! Lists are sequences of single elements which maintain the order in
+//! which you inserted them. The only list in this library is
+//! [`Vector`][vector::Vector], which offers the best all round
+//! performance characteristics: it's pretty good at everything, even
+//! if there's always another kind of list that's better at something.
+//!
+//! | Type | Algorithm | Constraints | Order | Push | Pop | Split | Append | Lookup |
+//! | --- | --- | --- | --- | --- | --- | --- | --- | --- |
+//! | [`Vector<A>`][vector::Vector] | [RRB tree][rrb-tree] | [`Clone`][std::clone::Clone] | insertion | O(1)\* | O(1)\* | O(log n) | O(log n) | O(log n) |
+//!
+//! ### Maps
+//!
+//! Maps are mappings of keys to values, where the most common read
+//! operation is to find the value associated with a given key. Maps
+//! may or may not have a defined order. Any given key can only occur
+//! once inside a map, and setting a key to a different value will
+//! overwrite the previous value.
+//!
+//! | Type | Algorithm | Key Constraints | Order | Insert | Remove | Lookup |
+//! | --- | --- | --- | --- | --- | --- | --- |
+//! | [`HashMap<K, V>`][hashmap::HashMap] | [HAMT][hamt] | [`Clone`][std::clone::Clone] + [`Hash`][std::hash::Hash] + [`Eq`][std::cmp::Eq] | undefined | O(log n) | O(log n) | O(log n) |
+//! | [`OrdMap<K, V>`][ordmap::OrdMap] | [B-tree][b-tree] | [`Clone`][std::clone::Clone] + [`Ord`][std::cmp::Ord] | sorted | O(log n) | O(log n) | O(log n) |
+//!
+//! ### Sets
+//!
+//! Sets are collections of unique values, and may or may not have a
+//! defined order. Their crucial property is that any given value can
+//! only exist once in a given set.
+//!
+//! | Type | Algorithm | Constraints | Order | Insert | Remove | Lookup |
+//! | --- | --- | --- | --- | --- | --- | --- |
+//! | [`HashSet<A>`][hashset::HashSet] | [HAMT][hamt] | [`Clone`][std::clone::Clone] + [`Hash`][std::hash::Hash] + [`Eq`][std::cmp::Eq] | undefined | O(log n) | O(log n) | O(log n) |
+//! | [`OrdSet<A>`][ordset::OrdSet] | [B-tree][b-tree] | [`Clone`][std::clone::Clone] + [`Ord`][std::cmp::Ord] | sorted | O(log n) | O(log n) | O(log n) |
+//!
+//! ## In-place Mutation
+//!
+//! All of these data structures support in-place copy-on-write
+//! mutation, which means that if you're the sole user of a data
+//! structure, you can update it in place without taking the
+//! performance hit of making a copy of the data structure before
+//! modifying it (this is about an order of magnitude faster than
+//! immutable operations, almost as fast as
+//! [`std::collections`][std::collections]'s mutable data structures).
+//!
+//! Thanks to [`Rc`][std::rc::Rc]'s reference counting, we are able to
+//! determine whether a node in a data structure is being shared with
+//! other data structures, or whether it's safe to mutate it in place.
+//! When it's shared, we'll automatically make a copy of the node
+//! before modifying it. The consequence of this is that cloning a
+//! data structure becomes a lazy operation: the initial clone is
+//! instant, and as you modify the cloned data structure it will clone
+//! chunks only where you change them, so that if you change the
+//! entire thing you will eventually have performed a full clone.
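+//!
+//! A small sketch of the two flavours of update this enables, using a
+//! `HashSet` (the other types work the same way):
+//!
+//! ```
+//! # #[macro_use] extern crate im_rc as im;
+//! let mut owned = hashset![1, 2];
+//! owned.insert(3); // sole owner: updated in place, nothing is copied
+//!
+//! let shared = owned.clone();
+//! let updated = shared.update(4); // shared nodes are copied as needed
+//! assert!(!shared.contains(&4));
+//! assert!(updated.contains(&4));
+//! ```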
+//!
+//! This also gives us a couple of other optimisations for free:
+//! implementations of immutable data structures in other languages
+//! often have the idea of local mutation, like Clojure's transients
+//! or Haskell's `ST` monad - a managed scope where you can treat an
+//! immutable data structure like a mutable one, gaining a
+//! considerable amount of performance because you no longer need to
+//! copy your changed nodes for every operation, just the first time
+//! you hit a node that's sharing structure. In Rust, we don't need to
+//! think about this kind of managed scope, it's all taken care of
+//! behind the scenes because of our low level access to the garbage
+//! collector (which, in our case, is just a simple
+//! [`Rc`][std::rc::Rc]).
+//!
+//! ## Thread Safety
+//!
+//! The data structures in the `im` crate are thread safe, through
+//! [`Arc`][std::sync::Arc]. This comes with a slight performance impact, so
+//! that if you prioritise speed over thread safety, you may want to use the
+//! `im-rc` crate instead, which is identical to `im` except that it uses
+//! [`Rc`][std::rc::Rc] instead of [`Arc`][std::sync::Arc], implying that the
+//! data structures in `im-rc` do not implement [`Send`][std::marker::Send] and
+//! [`Sync`][std::marker::Sync]. This yields approximately a 20-25% increase in
+//! general performance.
+//!
+//! ## Feature Flags
+//!
+//! `im` comes with optional support for the following crates through Cargo
+//! feature flags. You can enable them in your `Cargo.toml` file like this:
+//!
+//! ```no_compile
+//! [dependencies]
+//! im = { version = "*", features = ["proptest", "serde"] }
+//! ```
+//!
+//! | Feature | Description |
+//! | ------- | ----------- |
+//! | [`pool`](https://crates.io/crates/refpool) | Constructors and pool types for [`refpool`](https://crates.io/crates/refpool) memory pools (only available in `im-rc`) |
+//! | [`proptest`](https://crates.io/crates/proptest) | Strategies for all `im` datatypes under a `proptest` namespace, eg. `im::vector::proptest::vector()` |
+//! | [`quickcheck`](https://crates.io/crates/quickcheck) | [`quickcheck::Arbitrary`](https://docs.rs/quickcheck/latest/quickcheck/trait.Arbitrary.html) implementations for all `im` datatypes (not available in `im-rc`) |
+//! | [`rayon`](https://crates.io/crates/rayon) | Parallel iterator implementations for [`Vector`][vector::Vector] (not available in `im-rc`) |
+//! | [`serde`](https://crates.io/crates/serde) | [`Serialize`](https://docs.rs/serde/latest/serde/trait.Serialize.html) and [`Deserialize`](https://docs.rs/serde/latest/serde/trait.Deserialize.html) implementations for all `im` datatypes |
+//! | [`arbitrary`](https://crates.io/crates/arbitrary/) | [`arbitrary::Arbitrary`](https://docs.rs/arbitrary/latest/arbitrary/trait.Arbitrary.html) implementations for all `im` datatypes |
+//!
+//! [std::collections]: https://doc.rust-lang.org/std/collections/index.html
+//! [std::collections::VecDeque]: https://doc.rust-lang.org/std/collections/struct.VecDeque.html
+//! [std::vec::Vec]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+//! [std::string::String]: https://doc.rust-lang.org/std/string/struct.String.html
+//! [std::rc::Rc]: https://doc.rust-lang.org/std/rc/struct.Rc.html
+//! [std::sync::Arc]: https://doc.rust-lang.org/std/sync/struct.Arc.html
+//! [std::cmp::Eq]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+//! [std::cmp::Ord]: https://doc.rust-lang.org/std/cmp/trait.Ord.html
+//! [std::clone::Clone]: https://doc.rust-lang.org/std/clone/trait.Clone.html
+//! [std::clone::Clone::clone]: https://doc.rust-lang.org/std/clone/trait.Clone.html#tymethod.clone
+//! [std::marker::Copy]: https://doc.rust-lang.org/std/marker/trait.Copy.html
+//! [std::hash::Hash]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+//! [std::marker::Send]: https://doc.rust-lang.org/std/marker/trait.Send.html
+//! [std::marker::Sync]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+//! [hashmap::HashMap]: ./struct.HashMap.html
+//! [hashset::HashSet]: ./struct.HashSet.html
+//! [ordmap::OrdMap]: ./struct.OrdMap.html
+//! [ordset::OrdSet]: ./struct.OrdSet.html
+//! [vector::Vector]: ./struct.Vector.html
+//! [vector::Vector::push_back]: ./vector/enum.Vector.html#method.push_back
+//! [rrb-tree]: https://infoscience.epfl.ch/record/213452/files/rrbvector.pdf
+//! [hamt]: https://en.wikipedia.org/wiki/Hash_array_mapped_trie
+//! [b-tree]: https://en.wikipedia.org/wiki/B-tree
+//! [cons-list]: https://en.wikipedia.org/wiki/Cons#Lists
+
+#![forbid(rust_2018_idioms)]
+#![deny(unsafe_code, nonstandard_style)]
+#![warn(unreachable_pub, missing_docs)]
+#![cfg_attr(has_specialisation, feature(specialization))]
+
+#[cfg(test)]
+#[macro_use]
+extern crate pretty_assertions;
+
+mod config;
+mod nodes;
+mod sort;
+mod sync;
+
+#[macro_use]
+mod util;
+
+#[macro_use]
+mod ord;
+pub use crate::ord::map as ordmap;
+pub use crate::ord::set as ordset;
+
+#[macro_use]
+mod hash;
+pub use crate::hash::map as hashmap;
+pub use crate::hash::set as hashset;
+
+#[macro_use]
+pub mod vector;
+
+pub mod iter;
+
+#[cfg(any(test, feature = "proptest"))]
+pub mod proptest;
+
+#[cfg(any(test, feature = "serde"))]
+#[doc(hidden)]
+pub mod ser;
+
+#[cfg(feature = "arbitrary")]
+#[doc(hidden)]
+pub mod arbitrary;
+
+#[cfg(all(threadsafe, feature = "quickcheck"))]
+#[doc(hidden)]
+pub mod quickcheck;
+
+#[cfg(any(threadsafe, not(feature = "pool")))]
+mod fakepool;
+
+#[cfg(all(threadsafe, feature = "pool"))]
+compile_error!(
+ "The `pool` feature is not threadsafe but you've enabled it on a threadsafe version of `im`."
+);
+
+pub use crate::hashmap::HashMap;
+pub use crate::hashset::HashSet;
+pub use crate::ordmap::OrdMap;
+pub use crate::ordset::OrdSet;
+#[doc(inline)]
+pub use crate::vector::Vector;
+
+#[cfg(test)]
+mod test;
+
+#[cfg(test)]
+mod tests;
+
+/// Update a value inside multiple levels of data structures.
+///
+/// This macro takes a [`Vector`][Vector], [`OrdMap`][OrdMap] or [`HashMap`][HashMap],
+/// a key or a series of keys, and a value, and returns the data structure with the
+/// new value at the location described by the keys.
+///
+/// If one of the keys in the path doesn't exist, the macro will panic.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use std::sync::Arc;
+/// # fn main() {
+/// let vec_inside_vec = vector![vector![1, 2, 3], vector![4, 5, 6]];
+///
+/// let expected = vector![vector![1, 2, 3], vector![4, 5, 1337]];
+///
+/// assert_eq!(expected, update_in![vec_inside_vec, 1 => 2, 1337]);
+/// # }
+/// ```
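+///
+/// The same applies to maps, and different structures can be nested freely;
+/// for example, a map of vectors:
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # fn main() {
+/// let map_of_vecs = hashmap!["key" => vector![1, 2, 3]];
+///
+/// let expected = hashmap!["key" => vector![1, 1337, 3]];
+///
+/// assert_eq!(expected, update_in![map_of_vecs, "key" => 1, 1337]);
+/// # }
+/// ```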
+///
+/// [Vector]: ../vector/enum.Vector.html
+/// [HashMap]: ../hashmap/struct.HashMap.html
+/// [OrdMap]: ../ordmap/struct.OrdMap.html
+#[macro_export]
+macro_rules! update_in {
+ ($target:expr, $path:expr => $($tail:tt) => *, $value:expr ) => {{
+ let inner = $target.get($path).expect("update_in! macro: key not found in target");
+ $target.update($path, update_in!(inner, $($tail) => *, $value))
+ }};
+
+ ($target:expr, $path:expr, $value:expr) => {
+ $target.update($path, $value)
+ };
+}
+
+/// Get a value inside multiple levels of data structures.
+///
+/// This macro takes a [`Vector`][Vector], [`OrdMap`][OrdMap] or [`HashMap`][HashMap],
+/// along with a key or a series of keys, and returns the value at the location inside
+/// the data structure described by the key sequence, or `None` if any of the keys didn't
+/// exist.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use std::sync::Arc;
+/// # fn main() {
+/// let vec_inside_vec = vector![vector![1, 2, 3], vector![4, 5, 6]];
+///
+/// assert_eq!(Some(&6), get_in![vec_inside_vec, 1 => 2]);
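+///
+/// // A key that's missing anywhere along the path yields `None` instead of
+/// // panicking.
+/// assert_eq!(None, get_in![vec_inside_vec, 1 => 7]);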
+/// # }
+/// ```
+///
+/// [Vector]: ../vector/enum.Vector.html
+/// [HashMap]: ../hashmap/struct.HashMap.html
+/// [OrdMap]: ../ordmap/struct.OrdMap.html
+#[macro_export]
+macro_rules! get_in {
+ ($target:expr, $path:expr => $($tail:tt) => * ) => {{
+ $target.get($path).and_then(|v| get_in!(v, $($tail) => *))
+ }};
+
+ ($target:expr, $path:expr) => {
+ $target.get($path)
+ };
+}
+
+#[cfg(test)]
+mod lib_test {
+ #[test]
+ fn update_in() {
+ let vector = vector![1, 2, 3, 4, 5];
+ assert_eq!(vector![1, 2, 23, 4, 5], update_in!(vector, 2, 23));
+ let hashmap = hashmap![1 => 1, 2 => 2, 3 => 3];
+ assert_eq!(
+ hashmap![1 => 1, 2 => 23, 3 => 3],
+ update_in!(hashmap, 2, 23)
+ );
+ let ordmap = ordmap![1 => 1, 2 => 2, 3 => 3];
+ assert_eq!(ordmap![1 => 1, 2 => 23, 3 => 3], update_in!(ordmap, 2, 23));
+
+ let vecs = vector![vector![1, 2, 3], vector![4, 5, 6], vector![7, 8, 9]];
+ let vecs_target = vector![vector![1, 2, 3], vector![4, 5, 23], vector![7, 8, 9]];
+ assert_eq!(vecs_target, update_in!(vecs, 1 => 2, 23));
+ }
+
+ #[test]
+ fn get_in() {
+ let vector = vector![1, 2, 3, 4, 5];
+ assert_eq!(Some(&3), get_in!(vector, 2));
+ let hashmap = hashmap![1 => 1, 2 => 2, 3 => 3];
+ assert_eq!(Some(&2), get_in!(hashmap, &2));
+ let ordmap = ordmap![1 => 1, 2 => 2, 3 => 3];
+ assert_eq!(Some(&2), get_in!(ordmap, &2));
+
+ let vecs = vector![vector![1, 2, 3], vector![4, 5, 6], vector![7, 8, 9]];
+ assert_eq!(Some(&6), get_in!(vecs, 1 => 2));
+ }
+}
diff --git a/vendor/im-rc/src/nodes/btree.rs b/vendor/im-rc/src/nodes/btree.rs
new file mode 100644
index 000000000..84f63fa96
--- /dev/null
+++ b/vendor/im-rc/src/nodes/btree.rs
@@ -0,0 +1,1368 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::mem;
+use std::ops::{Bound, RangeBounds};
+
+use sized_chunks::Chunk;
+use typenum::{Add1, Unsigned};
+
+use crate::config::OrdChunkSize as NodeSize;
+use crate::util::{Pool, PoolClone, PoolDefault, PoolRef};
+
+use self::Insert::*;
+use self::InsertAction::*;
+
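+// The maximum number of keys a node can hold, and the minimum (MEDIAN) a
+// non-root node is expected to keep: a node with fewer than MEDIAN keys is
+// considered too small and gets rebalanced during removal.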
+pub(crate) const NODE_SIZE: usize = NodeSize::USIZE;
+const MEDIAN: usize = (NODE_SIZE + 1) >> 1;
+
+pub trait BTreeValue {
+ type Key;
+ fn ptr_eq(&self, other: &Self) -> bool;
+ fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self: Sized,
+ Self::Key: Borrow<BK>;
+ fn search_value(slice: &[Self], value: &Self) -> Result<usize, usize>
+ where
+ Self: Sized;
+ fn cmp_keys<BK>(&self, other: &BK) -> Ordering
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>;
+ fn cmp_values(&self, other: &Self) -> Ordering;
+}
+
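+// A B-tree node: `keys` holds the values in sorted order, and `children`
+// always holds one more entry than `keys`, with every entry being `None`
+// when the node is a leaf.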
+pub(crate) struct Node<A> {
+ keys: Chunk<A, NodeSize>,
+ children: Chunk<Option<PoolRef<Node<A>>>, Add1<NodeSize>>,
+}
+
+#[cfg(feature = "pool")]
+#[allow(unsafe_code)]
+unsafe fn cast_uninit<A>(target: &mut A) -> &mut mem::MaybeUninit<A> {
+ &mut *(target as *mut A as *mut mem::MaybeUninit<A>)
+}
+
+#[allow(unsafe_code)]
+impl<A> PoolDefault for Node<A> {
+ #[cfg(feature = "pool")]
+ unsafe fn default_uninit(target: &mut mem::MaybeUninit<Self>) {
+ let ptr: *mut Self = target.as_mut_ptr();
+ Chunk::default_uninit(cast_uninit(&mut (*ptr).keys));
+ Chunk::default_uninit(cast_uninit(&mut (*ptr).children));
+ (*ptr).children.push_back(None);
+ }
+}
+
+#[allow(unsafe_code)]
+impl<A> PoolClone for Node<A>
+where
+ A: Clone,
+{
+ #[cfg(feature = "pool")]
+ unsafe fn clone_uninit(&self, target: &mut mem::MaybeUninit<Self>) {
+ self.keys
+ .clone_uninit(cast_uninit(&mut (*target.as_mut_ptr()).keys));
+ self.children
+ .clone_uninit(cast_uninit(&mut (*target.as_mut_ptr()).children));
+ }
+}
+
+pub(crate) enum Insert<A> {
+ Added,
+ Replaced(A),
+ Split(Node<A>, A, Node<A>),
+}
+
+enum InsertAction<A> {
+ AddedAction,
+ ReplacedAction(A),
+ InsertAt,
+ InsertSplit(Node<A>, A, Node<A>),
+}
+
+pub(crate) enum Remove<A> {
+ NoChange,
+ Removed(A),
+ Update(A, Node<A>),
+}
+
+enum Boundary {
+ Lowest,
+ Highest,
+}
+
+enum RemoveAction {
+ DeleteAt(usize),
+ PullUp(Boundary, usize, usize),
+ Merge(usize),
+ StealFromLeft(usize),
+ StealFromRight(usize),
+ MergeFirst(usize),
+ ContinueDown(usize),
+}
+
+impl<A> Clone for Node<A>
+where
+ A: Clone,
+{
+ fn clone(&self) -> Self {
+ Node {
+ keys: self.keys.clone(),
+ children: self.children.clone(),
+ }
+ }
+}
+
+impl<A> Default for Node<A> {
+ fn default() -> Self {
+ Node {
+ keys: Chunk::new(),
+ children: Chunk::unit(None),
+ }
+ }
+}
+
+impl<A> Node<A> {
+ #[inline]
+ fn has_room(&self) -> bool {
+ self.keys.len() < NODE_SIZE
+ }
+
+ #[inline]
+ fn too_small(&self) -> bool {
+ self.keys.len() < MEDIAN
+ }
+
+ #[inline]
+ pub(crate) fn unit(value: A) -> Self {
+ Node {
+ keys: Chunk::unit(value),
+ children: Chunk::pair(None, None),
+ }
+ }
+
+ #[inline]
+ pub(crate) fn new_from_split(
+ pool: &Pool<Node<A>>,
+ left: Node<A>,
+ median: A,
+ right: Node<A>,
+ ) -> Self {
+ Node {
+ keys: Chunk::unit(median),
+ children: Chunk::pair(
+ Some(PoolRef::new(pool, left)),
+ Some(PoolRef::new(pool, right)),
+ ),
+ }
+ }
+
+ pub(crate) fn min(&self) -> Option<&A> {
+ match self.children.first().unwrap() {
+ None => self.keys.first(),
+ Some(ref child) => child.min(),
+ }
+ }
+
+ pub(crate) fn max(&self) -> Option<&A> {
+ match self.children.last().unwrap() {
+ None => self.keys.last(),
+ Some(ref child) => child.max(),
+ }
+ }
+}
+
+impl<A: BTreeValue> Node<A> {
+ fn child_contains<BK>(&self, index: usize, key: &BK) -> bool
+ where
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if let Some(Some(ref child)) = self.children.get(index) {
+ child.lookup(key).is_some()
+ } else {
+ false
+ }
+ }
+
+ pub(crate) fn lookup<BK>(&self, key: &BK) -> Option<&A>
+ where
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return None;
+ }
+ // Perform a binary search, resulting in either a match or
+ // the index of the first higher key, meaning we search the
+ // child to the left of it.
+ match A::search_key(&self.keys, key) {
+ Ok(index) => Some(&self.keys[index]),
+ Err(index) => match self.children[index] {
+ None => None,
+ Some(ref node) => node.lookup(key),
+ },
+ }
+ }
+
+ pub(crate) fn lookup_mut<BK>(&mut self, pool: &Pool<Node<A>>, key: &BK) -> Option<&mut A>
+ where
+ A: Clone,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return None;
+ }
+ // Perform a binary search, resulting in either a match or
+ // the index of the first higher key, meaning we search the
+ // child to the left of it.
+ match A::search_key(&self.keys, key) {
+ Ok(index) => Some(&mut self.keys[index]),
+ Err(index) => match self.children[index] {
+ None => None,
+ Some(ref mut child_ref) => {
+ let child = PoolRef::make_mut(pool, child_ref);
+ child.lookup_mut(pool, key)
+ }
+ },
+ }
+ }
+
+ pub(crate) fn lookup_prev<'a, BK>(&'a self, key: &BK) -> Option<&A>
+ where
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return None;
+ }
+ match A::search_key(&self.keys, key) {
+ Ok(index) => Some(&self.keys[index]),
+ Err(index) => match self.children[index] {
+ None if index == 0 => None,
+ None => self.keys.get(index - 1).map(|_| &self.keys[index - 1]),
+ Some(ref node) => node.lookup_prev(key),
+ },
+ }
+ }
+
+ pub(crate) fn lookup_next<'a, BK>(&'a self, key: &BK) -> Option<&A>
+ where
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return None;
+ }
+ match A::search_key(&self.keys, key) {
+ Ok(index) => Some(&self.keys[index]),
+ Err(index) => match self.children[index] {
+ None => self.keys.get(index).map(|_| &self.keys[index]),
+ Some(ref node) => node.lookup_next(key),
+ },
+ }
+ }
+
+ pub(crate) fn lookup_prev_mut<'a, BK>(
+ &'a mut self,
+ pool: &Pool<Node<A>>,
+ key: &BK,
+ ) -> Option<&mut A>
+ where
+ A: Clone,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return None;
+ }
+ match A::search_key(&self.keys, key) {
+ Ok(index) => Some(&mut self.keys[index]),
+ Err(index) => match self.children[index] {
+ None if index == 0 => None,
+ None => match self.keys.get(index - 1) {
+ Some(_) => Some(&mut self.keys[index - 1]),
+ None => None,
+ },
+ Some(ref mut node) => PoolRef::make_mut(pool, node).lookup_prev_mut(pool, key),
+ },
+ }
+ }
+
+ pub(crate) fn lookup_next_mut<'a, BK>(
+ &'a mut self,
+ pool: &Pool<Node<A>>,
+ key: &BK,
+ ) -> Option<&mut A>
+ where
+ A: Clone,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return None;
+ }
+ match A::search_key(&self.keys, key) {
+ Ok(index) => Some(&mut self.keys[index]),
+ Err(index) => match self.children[index] {
+ None => match self.keys.get(index) {
+ Some(_) => Some(&mut self.keys[index]),
+ None => None,
+ },
+ Some(ref mut node) => PoolRef::make_mut(pool, node).lookup_next_mut(pool, key),
+ },
+ }
+ }
+
+ pub(crate) fn path_first<'a, BK>(
+ &'a self,
+ mut path: Vec<(&'a Node<A>, usize)>,
+ ) -> Vec<(&'a Node<A>, usize)>
+ where
+ A: 'a,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return Vec::new();
+ }
+ match self.children[0] {
+ None => {
+ path.push((self, 0));
+ path
+ }
+ Some(ref node) => {
+ path.push((self, 0));
+ node.path_first(path)
+ }
+ }
+ }
+
+ pub(crate) fn path_last<'a, BK>(
+ &'a self,
+ mut path: Vec<(&'a Node<A>, usize)>,
+ ) -> Vec<(&'a Node<A>, usize)>
+ where
+ A: 'a,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return Vec::new();
+ }
+ let end = self.children.len() - 1;
+ match self.children[end] {
+ None => {
+ path.push((self, end - 1));
+ path
+ }
+ Some(ref node) => {
+ path.push((self, end));
+ node.path_last(path)
+ }
+ }
+ }
+
+ pub(crate) fn path_next<'a, BK>(
+ &'a self,
+ key: &BK,
+ mut path: Vec<(&'a Node<A>, usize)>,
+ ) -> Vec<(&'a Node<A>, usize)>
+ where
+ A: 'a,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return Vec::new();
+ }
+ match A::search_key(&self.keys, key) {
+ Ok(index) => {
+ path.push((self, index));
+ path
+ }
+ Err(index) => match self.children[index] {
+ None => match self.keys.get(index) {
+ Some(_) => {
+ path.push((self, index));
+ path
+ }
+ None => {
+ // go back up to find next
+ while let Some((node, idx)) = path.last() {
+ if node.keys.len() == *idx {
+ path.pop();
+ } else {
+ break;
+ }
+ }
+ path
+ }
+ },
+ Some(ref node) => {
+ path.push((self, index));
+ node.path_next(key, path)
+ }
+ },
+ }
+ }
+
+ pub(crate) fn path_prev<'a, BK>(
+ &'a self,
+ key: &BK,
+ mut path: Vec<(&'a Node<A>, usize)>,
+ ) -> Vec<(&'a Node<A>, usize)>
+ where
+ A: 'a,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ if self.keys.is_empty() {
+ return Vec::new();
+ }
+ match A::search_key(&self.keys, key) {
+ Ok(index) => {
+ path.push((self, index));
+ path
+ }
+ Err(index) => match self.children[index] {
+ None if index == 0 => {
+ // go back up to find prev
+ while let Some((_, idx)) = path.last_mut() {
+ if *idx == 0 {
+ path.pop();
+ } else {
+ *idx -= 1;
+ break;
+ }
+ }
+ path
+ }
+ None => {
+ path.push((self, index - 1));
+ path
+ }
+ Some(ref node) => {
+ path.push((self, index));
+ node.path_prev(key, path)
+ }
+ },
+ }
+ }
+
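+    // Split a full node around the value being inserted, producing a left
+    // node, a median value to push up into the parent, and a right node,
+    // with each half ending up holding exactly MEDIAN keys.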
+ fn split(
+ &mut self,
+ pool: &Pool<Node<A>>,
+ value: A,
+ ins_left: Option<Node<A>>,
+ ins_right: Option<Node<A>>,
+ ) -> Insert<A> {
+ let left_child = ins_left.map(|node| PoolRef::new(pool, node));
+ let right_child = ins_right.map(|node| PoolRef::new(pool, node));
+ let index = A::search_value(&self.keys, &value).unwrap_err();
+ let mut left_keys;
+ let mut left_children;
+ let mut right_keys;
+ let mut right_children;
+ let median;
+ match index.cmp(&MEDIAN) {
+ Ordering::Less => {
+ self.children[index] = left_child;
+
+ left_keys = Chunk::from_front(&mut self.keys, index);
+ left_keys.push_back(value);
+ left_keys.drain_from_front(&mut self.keys, MEDIAN - index - 1);
+
+ left_children = Chunk::from_front(&mut self.children, index + 1);
+ left_children.push_back(right_child);
+ left_children.drain_from_front(&mut self.children, MEDIAN - index - 1);
+
+ median = self.keys.pop_front();
+
+ right_keys = Chunk::drain_from(&mut self.keys);
+ right_children = Chunk::drain_from(&mut self.children);
+ }
+ Ordering::Greater => {
+ self.children[index] = left_child;
+
+ left_keys = Chunk::from_front(&mut self.keys, MEDIAN);
+ left_children = Chunk::from_front(&mut self.children, MEDIAN + 1);
+
+ median = self.keys.pop_front();
+
+ right_keys = Chunk::from_front(&mut self.keys, index - MEDIAN - 1);
+ right_keys.push_back(value);
+ right_keys.append(&mut self.keys);
+
+ right_children = Chunk::from_front(&mut self.children, index - MEDIAN);
+ right_children.push_back(right_child);
+ right_children.append(&mut self.children);
+ }
+ Ordering::Equal => {
+ left_keys = Chunk::from_front(&mut self.keys, MEDIAN);
+ left_children = Chunk::from_front(&mut self.children, MEDIAN);
+ left_children.push_back(left_child);
+
+ median = value;
+
+ right_keys = Chunk::drain_from(&mut self.keys);
+ right_children = Chunk::drain_from(&mut self.children);
+ right_children[0] = right_child;
+ }
+ }
+
+ debug_assert!(left_keys.len() == MEDIAN);
+ debug_assert!(left_children.len() == MEDIAN + 1);
+ debug_assert!(right_keys.len() == MEDIAN);
+ debug_assert!(right_children.len() == MEDIAN + 1);
+
+ Split(
+ Node {
+ keys: left_keys,
+ children: left_children,
+ },
+ median,
+ Node {
+ keys: right_keys,
+ children: right_children,
+ },
+ )
+ }
+
+ fn merge(middle: A, left: Node<A>, mut right: Node<A>) -> Node<A> {
+ let mut keys = left.keys;
+ keys.push_back(middle);
+ keys.append(&mut right.keys);
+ let mut children = left.children;
+ children.append(&mut right.children);
+ Node { keys, children }
+ }
+
+ fn pop_min(&mut self) -> (A, Option<PoolRef<Node<A>>>) {
+ let value = self.keys.pop_front();
+ let child = self.children.pop_front();
+ (value, child)
+ }
+
+ fn pop_max(&mut self) -> (A, Option<PoolRef<Node<A>>>) {
+ let value = self.keys.pop_back();
+ let child = self.children.pop_back();
+ (value, child)
+ }
+
+ fn push_min(&mut self, child: Option<PoolRef<Node<A>>>, value: A) {
+ self.keys.push_front(value);
+ self.children.push_front(child);
+ }
+
+ fn push_max(&mut self, child: Option<PoolRef<Node<A>>>, value: A) {
+ self.keys.push_back(value);
+ self.children.push_back(child);
+ }
+
+ pub(crate) fn insert(&mut self, pool: &Pool<Node<A>>, value: A) -> Insert<A>
+ where
+ A: Clone,
+ {
+ if self.keys.is_empty() {
+ self.keys.push_back(value);
+ self.children.push_back(None);
+ return Insert::Added;
+ }
+ let (median, left, right) = match A::search_value(&self.keys, &value) {
+ // Key exists in node
+ Ok(index) => {
+ return Insert::Replaced(mem::replace(&mut self.keys[index], value));
+ }
+ // Key is adjacent to some key in node
+ Err(index) => {
+ let has_room = self.has_room();
+ let action = match self.children[index] {
+ // No child at location, this is the target node.
+ None => InsertAt,
+ // Child at location, pass it on.
+ Some(ref mut child_ref) => {
+ let child = PoolRef::make_mut(pool, child_ref);
+ match child.insert(pool, value.clone()) {
+ Insert::Added => AddedAction,
+ Insert::Replaced(value) => ReplacedAction(value),
+ Insert::Split(left, median, right) => InsertSplit(left, median, right),
+ }
+ }
+ };
+ match action {
+ ReplacedAction(value) => return Insert::Replaced(value),
+ AddedAction => {
+ return Insert::Added;
+ }
+ InsertAt => {
+ if has_room {
+ self.keys.insert(index, value);
+ self.children.insert(index + 1, None);
+ return Insert::Added;
+ } else {
+ (value, None, None)
+ }
+ }
+ InsertSplit(left, median, right) => {
+ if has_room {
+ self.children[index] = Some(PoolRef::new(pool, left));
+ self.keys.insert(index, median);
+ self.children
+ .insert(index + 1, Some(PoolRef::new(pool, right)));
+ return Insert::Added;
+ } else {
+ (median, Some(left), Some(right))
+ }
+ }
+ }
+ }
+ };
+ self.split(pool, median, left, right)
+ }
+
+ pub(crate) fn remove<BK>(&mut self, pool: &Pool<Node<A>>, key: &BK) -> Remove<A>
+ where
+ A: Clone,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let index = A::search_key(&self.keys, key);
+ self.remove_index(pool, index, Ok(key))
+ }
+
+ fn remove_target<BK>(
+ &mut self,
+ pool: &Pool<Node<A>>,
+ target: Result<&BK, Boundary>,
+ ) -> Remove<A>
+ where
+ A: Clone,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let index = match target {
+ Ok(key) => A::search_key(&self.keys, key),
+ Err(Boundary::Lowest) => Err(0),
+ Err(Boundary::Highest) => Err(self.keys.len()),
+ };
+ self.remove_index(pool, index, target)
+ }
+
+ fn remove_index<BK>(
+ &mut self,
+ pool: &Pool<Node<A>>,
+ index: Result<usize, usize>,
+ target: Result<&BK, Boundary>,
+ ) -> Remove<A>
+ where
+ A: Clone,
+ BK: Ord + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let action = match index {
+ // Key exists in node, remove it.
+ Ok(index) => {
+ match (&self.children[index], &self.children[index + 1]) {
+ // If we're a leaf, just delete the entry.
+ (&None, &None) => RemoveAction::DeleteAt(index),
+                    // First consider pulling up either the predecessor (from the
+                    // left child) or the successor (from the right child);
+                    // otherwise just merge the two small children.
+ (&Some(ref left), &Some(ref right)) => {
+ if !left.too_small() {
+ RemoveAction::PullUp(Boundary::Highest, index, index)
+ } else if !right.too_small() {
+ RemoveAction::PullUp(Boundary::Lowest, index, index + 1)
+ } else {
+ RemoveAction::Merge(index)
+ }
+ }
+ _ => unreachable!("Branch missing children"),
+ }
+ }
+ // Target is adjacent to some key in node
+ Err(index) => match self.children[index] {
+            // We're dealing with a leaf node
+ None => match target {
+ // No child at location means key isn't in map.
+ Ok(_key) => return Remove::NoChange,
+ // Looking for the lowest or highest key
+ Err(Boundary::Lowest) => RemoveAction::DeleteAt(0),
+ Err(Boundary::Highest) => RemoveAction::DeleteAt(self.keys.len() - 1),
+ },
+ // Child at location, but it's at minimum capacity.
+ Some(ref child) if child.too_small() => {
+ let left = if index > 0 {
+ self.children.get(index - 1)
+ } else {
+ None
+ }; // index is usize and can't be negative, best make sure it never is.
+ match (left, self.children.get(index + 1)) {
+ // If it has a left sibling with capacity, steal a key from it.
+ (Some(&Some(ref old_left)), _) if !old_left.too_small() => {
+ RemoveAction::StealFromLeft(index)
+ }
+ // If it has a right sibling with capacity, same as above.
+ (_, Some(&Some(ref old_right))) if !old_right.too_small() => {
+ RemoveAction::StealFromRight(index)
+ }
+ // If it has neither, we'll have to merge it with a sibling.
+ // If we have a right sibling, we'll merge with that.
+ (_, Some(&Some(_))) => RemoveAction::MergeFirst(index),
+ // If we have a left sibling, we'll merge with that.
+ (Some(&Some(_)), _) => RemoveAction::MergeFirst(index - 1),
+ // If none of the above, we're in a bad state.
+ _ => unreachable!(),
+ }
+ }
+ // Child at location, and it's big enough, we can recurse down.
+ Some(_) => RemoveAction::ContinueDown(index),
+ },
+ };
+ match action {
+ RemoveAction::DeleteAt(index) => {
+ let pair = self.keys.remove(index);
+ self.children.remove(index);
+ Remove::Removed(pair)
+ }
+ RemoveAction::PullUp(boundary, pull_to, child_index) => {
+ let children = &mut self.children;
+ let mut update = None;
+ let value;
+ if let Some(&mut Some(ref mut child_ref)) = children.get_mut(child_index) {
+ let child = PoolRef::make_mut(pool, child_ref);
+ match child.remove_target(pool, Err(boundary)) {
+ Remove::NoChange => unreachable!(),
+ Remove::Removed(pulled_value) => {
+ value = self.keys.set(pull_to, pulled_value);
+ }
+ Remove::Update(pulled_value, new_child) => {
+ value = self.keys.set(pull_to, pulled_value);
+ update = Some(new_child);
+ }
+ }
+ } else {
+ unreachable!()
+ }
+ if let Some(new_child) = update {
+ children[child_index] = Some(PoolRef::new(pool, new_child));
+ }
+ Remove::Removed(value)
+ }
+ RemoveAction::Merge(index) => {
+ let left = self.children.remove(index).unwrap();
+ let right = mem::replace(&mut self.children[index], None).unwrap();
+ let value = self.keys.remove(index);
+ let mut merged_child = Node::merge(
+ value,
+ PoolRef::unwrap_or_clone(left),
+ PoolRef::unwrap_or_clone(right),
+ );
+ let (removed, new_child) = match merged_child.remove_target(pool, target) {
+ Remove::NoChange => unreachable!(),
+ Remove::Removed(removed) => (removed, merged_child),
+ Remove::Update(removed, updated_child) => (removed, updated_child),
+ };
+ if self.keys.is_empty() {
+ // If we've depleted the root node, the merged child becomes the root.
+ Remove::Update(removed, new_child)
+ } else {
+ self.children[index] = Some(PoolRef::new(pool, new_child));
+ Remove::Removed(removed)
+ }
+ }
+ RemoveAction::StealFromLeft(index) => {
+ let mut update = None;
+ let out_value;
+ {
+ let mut children = self.children.as_mut_slice()[index - 1..=index]
+ .iter_mut()
+ .map(|n| n.as_mut().unwrap());
+ let left = PoolRef::make_mut(pool, children.next().unwrap());
+ let child = PoolRef::make_mut(pool, children.next().unwrap());
+ // Prepare the rebalanced node.
+ child.push_min(
+ left.children.last().unwrap().clone(),
+ self.keys[index - 1].clone(),
+ );
+ match child.remove_target(pool, target) {
+ Remove::NoChange => {
+ // Key wasn't there, we need to revert the steal.
+ child.pop_min();
+ return Remove::NoChange;
+ }
+ Remove::Removed(value) => {
+ // If we did remove something, we complete the rebalancing.
+ let (left_value, _) = left.pop_max();
+ self.keys[index - 1] = left_value;
+ out_value = value;
+ }
+ Remove::Update(value, new_child) => {
+ // If we did remove something, we complete the rebalancing.
+ let (left_value, _) = left.pop_max();
+ self.keys[index - 1] = left_value;
+ update = Some(new_child);
+ out_value = value;
+ }
+ }
+ }
+ if let Some(new_child) = update {
+ self.children[index] = Some(PoolRef::new(pool, new_child));
+ }
+ Remove::Removed(out_value)
+ }
+ RemoveAction::StealFromRight(index) => {
+ let mut update = None;
+ let out_value;
+ {
+ let mut children = self.children.as_mut_slice()[index..index + 2]
+ .iter_mut()
+ .map(|n| n.as_mut().unwrap());
+ let child = PoolRef::make_mut(pool, children.next().unwrap());
+ let right = PoolRef::make_mut(pool, children.next().unwrap());
+ // Prepare the rebalanced node.
+ child.push_max(right.children[0].clone(), self.keys[index].clone());
+ match child.remove_target(pool, target) {
+ Remove::NoChange => {
+ // Key wasn't there, we need to revert the steal.
+ child.pop_max();
+ return Remove::NoChange;
+ }
+ Remove::Removed(value) => {
+ // If we did remove something, we complete the rebalancing.
+ let (right_value, _) = right.pop_min();
+ self.keys[index] = right_value;
+ out_value = value;
+ }
+ Remove::Update(value, new_child) => {
+ // If we did remove something, we complete the rebalancing.
+ let (right_value, _) = right.pop_min();
+ self.keys[index] = right_value;
+ update = Some(new_child);
+ out_value = value;
+ }
+ }
+ }
+ if let Some(new_child) = update {
+ self.children[index] = Some(PoolRef::new(pool, new_child));
+ }
+ Remove::Removed(out_value)
+ }
+ RemoveAction::MergeFirst(index) => {
+ if let Ok(key) = target {
+                    // Bail out early if we're looking for a key that doesn't exist
+ match self.keys[index].cmp_keys(key) {
+ Ordering::Less if !self.child_contains(index + 1, key) => {
+ return Remove::NoChange
+ }
+ Ordering::Greater if !self.child_contains(index, key) => {
+ return Remove::NoChange
+ }
+ _ => (),
+ }
+ }
+ let left = self.children.remove(index).unwrap();
+ let right = mem::replace(&mut self.children[index], None).unwrap();
+ let middle = self.keys.remove(index);
+ let mut merged = Node::merge(
+ middle,
+ PoolRef::unwrap_or_clone(left),
+ PoolRef::unwrap_or_clone(right),
+ );
+ let update;
+ let out_value;
+ match merged.remove_target(pool, target) {
+ Remove::NoChange => {
+ panic!("nodes::btree::Node::remove: caught an absent key too late while merging");
+ }
+ Remove::Removed(value) => {
+ if self.keys.is_empty() {
+ return Remove::Update(value, merged);
+ }
+ update = merged;
+ out_value = value;
+ }
+ Remove::Update(value, new_child) => {
+ if self.keys.is_empty() {
+ return Remove::Update(value, new_child);
+ }
+ update = new_child;
+ out_value = value;
+ }
+ }
+ self.children[index] = Some(PoolRef::new(pool, update));
+ Remove::Removed(out_value)
+ }
+ RemoveAction::ContinueDown(index) => {
+ let mut update = None;
+ let out_value;
+ if let Some(&mut Some(ref mut child_ref)) = self.children.get_mut(index) {
+ let child = PoolRef::make_mut(pool, child_ref);
+ match child.remove_target(pool, target) {
+ Remove::NoChange => return Remove::NoChange,
+ Remove::Removed(value) => {
+ out_value = value;
+ }
+ Remove::Update(value, new_child) => {
+ update = Some(new_child);
+ out_value = value;
+ }
+ }
+ } else {
+ unreachable!()
+ }
+ if let Some(new_child) = update {
+ self.children[index] = Some(PoolRef::new(pool, new_child));
+ }
+ Remove::Removed(out_value)
+ }
+ }
+ }
+}
+
+// Iterator
+
+/// An iterator over an ordered set.
+pub struct Iter<'a, A> {
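+    // Both paths are cursor stacks of (node, index into that node's keys):
+    // one tracks the next element from the front, the other from the back.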
+ fwd_path: Vec<(&'a Node<A>, usize)>,
+ back_path: Vec<(&'a Node<A>, usize)>,
+ pub(crate) remaining: usize,
+}
+
+impl<'a, A: BTreeValue> Iter<'a, A> {
+ pub(crate) fn new<R, BK>(root: &'a Node<A>, size: usize, range: R) -> Self
+ where
+ R: RangeBounds<BK>,
+ A::Key: Borrow<BK>,
+ BK: Ord + ?Sized,
+ {
+ let fwd_path = match range.start_bound() {
+ Bound::Included(key) => root.path_next(key, Vec::new()),
+ Bound::Excluded(key) => {
+ let mut path = root.path_next(key, Vec::new());
+ if let Some(value) = Self::get(&path) {
+ if value.cmp_keys(key) == Ordering::Equal {
+ Self::step_forward(&mut path);
+ }
+ }
+ path
+ }
+ Bound::Unbounded => root.path_first(Vec::new()),
+ };
+ let back_path = match range.end_bound() {
+ Bound::Included(key) => root.path_prev(key, Vec::new()),
+ Bound::Excluded(key) => {
+ let mut path = root.path_prev(key, Vec::new());
+ if let Some(value) = Self::get(&path) {
+ if value.cmp_keys(key) == Ordering::Equal {
+ Self::step_back(&mut path);
+ }
+ }
+ path
+ }
+ Bound::Unbounded => root.path_last(Vec::new()),
+ };
+ Iter {
+ fwd_path,
+ back_path,
+ remaining: size,
+ }
+ }
+
+ fn get(path: &[(&'a Node<A>, usize)]) -> Option<&'a A> {
+ match path.last() {
+ Some((node, index)) => Some(&node.keys[*index]),
+ None => None,
+ }
+ }
+
+ fn step_forward(path: &mut Vec<(&'a Node<A>, usize)>) -> Option<&'a A> {
+ match path.pop() {
+ Some((node, index)) => {
+ let index = index + 1;
+ match node.children[index] {
+ // Child between current and next key -> step down
+ Some(ref child) => {
+ path.push((node, index));
+ path.push((child, 0));
+ let mut node = child;
+ while let Some(ref left_child) = node.children[0] {
+ path.push((left_child, 0));
+ node = left_child;
+ }
+ Some(&node.keys[0])
+ }
+ None => match node.keys.get(index) {
+ // Yield next key
+ value @ Some(_) => {
+ path.push((node, index));
+ value
+ }
+ // No more keys -> exhausted level, step up and yield
+ None => loop {
+ match path.pop() {
+ None => {
+ return None;
+ }
+ Some((node, index)) => {
+ if let value @ Some(_) = node.keys.get(index) {
+ path.push((node, index));
+ return value;
+ }
+ }
+ }
+ },
+ },
+ }
+ }
+ None => None,
+ }
+ }
+
+ fn step_back(path: &mut Vec<(&'a Node<A>, usize)>) -> Option<&'a A> {
+ match path.pop() {
+ Some((node, index)) => match node.children[index] {
+ Some(ref child) => {
+ path.push((node, index));
+ let mut end = child.keys.len() - 1;
+ path.push((child, end));
+ let mut node = child;
+ while let Some(ref right_child) = node.children[end + 1] {
+ end = right_child.keys.len() - 1;
+ path.push((right_child, end));
+ node = right_child;
+ }
+ Some(&node.keys[end])
+ }
+ None => {
+ if index == 0 {
+ loop {
+ match path.pop() {
+ None => {
+ return None;
+ }
+ Some((node, index)) => {
+ if index > 0 {
+ let index = index - 1;
+ path.push((node, index));
+ return Some(&node.keys[index]);
+ }
+ }
+ }
+ }
+ } else {
+ let index = index - 1;
+ path.push((node, index));
+ Some(&node.keys[index])
+ }
+ }
+ },
+ None => None,
+ }
+ }
+}
+
+impl<'a, A: 'a + BTreeValue> Iterator for Iter<'a, A> {
+ type Item = &'a A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match Iter::get(&self.fwd_path) {
+ None => None,
+ Some(value) => match Iter::get(&self.back_path) {
+ Some(last_value) if value.cmp_values(last_value) == Ordering::Greater => None,
+ None => None,
+ Some(_) => {
+ Iter::step_forward(&mut self.fwd_path);
+ self.remaining -= 1;
+ Some(value)
+ }
+ },
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // (0, Some(self.remaining))
+ (0, None)
+ }
+}
+
+impl<'a, A: 'a + BTreeValue> DoubleEndedIterator for Iter<'a, A> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ match Iter::get(&self.back_path) {
+ None => None,
+ Some(value) => match Iter::get(&self.fwd_path) {
+ Some(last_value) if value.cmp_values(last_value) == Ordering::Less => None,
+ None => None,
+ Some(_) => {
+ Iter::step_back(&mut self.back_path);
+ self.remaining -= 1;
+ Some(value)
+ }
+ },
+ }
+ }
+}
+
+// Consuming iterator
+
+enum ConsumingIterItem<A> {
+ Consider(Node<A>),
+ Yield(A),
+}
+
+/// A consuming iterator over an ordered set.
+pub struct ConsumingIter<A> {
+ fwd_last: Option<A>,
+ fwd_stack: Vec<ConsumingIterItem<A>>,
+ back_last: Option<A>,
+ back_stack: Vec<ConsumingIterItem<A>>,
+ remaining: usize,
+}
+
+impl<A: Clone> ConsumingIter<A> {
+ pub(crate) fn new(root: &Node<A>, total: usize) -> Self {
+ ConsumingIter {
+ fwd_last: None,
+ fwd_stack: vec![ConsumingIterItem::Consider(root.clone())],
+ back_last: None,
+ back_stack: vec![ConsumingIterItem::Consider(root.clone())],
+ remaining: total,
+ }
+ }
+
+ fn push_node(stack: &mut Vec<ConsumingIterItem<A>>, maybe_node: Option<PoolRef<Node<A>>>) {
+ if let Some(node) = maybe_node {
+ stack.push(ConsumingIterItem::Consider(PoolRef::unwrap_or_clone(node)))
+ }
+ }
+
+ fn push(stack: &mut Vec<ConsumingIterItem<A>>, mut node: Node<A>) {
+ for _n in 0..node.keys.len() {
+ ConsumingIter::push_node(stack, node.children.pop_back());
+ stack.push(ConsumingIterItem::Yield(node.keys.pop_back()));
+ }
+ ConsumingIter::push_node(stack, node.children.pop_back());
+ }
+
+ fn push_fwd(&mut self, node: Node<A>) {
+ ConsumingIter::push(&mut self.fwd_stack, node)
+ }
+
+ fn push_node_back(&mut self, maybe_node: Option<PoolRef<Node<A>>>) {
+ if let Some(node) = maybe_node {
+ self.back_stack
+ .push(ConsumingIterItem::Consider(PoolRef::unwrap_or_clone(node)))
+ }
+ }
+
+ fn push_back(&mut self, mut node: Node<A>) {
+ for _i in 0..node.keys.len() {
+ self.push_node_back(node.children.pop_front());
+ self.back_stack
+ .push(ConsumingIterItem::Yield(node.keys.pop_front()));
+ }
+ self.push_node_back(node.children.pop_back());
+ }
+}
+
+impl<A> Iterator for ConsumingIter<A>
+where
+ A: BTreeValue + Clone,
+{
+ type Item = A;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ match self.fwd_stack.pop() {
+ None => {
+ self.remaining = 0;
+ return None;
+ }
+ Some(ConsumingIterItem::Consider(node)) => self.push_fwd(node),
+ Some(ConsumingIterItem::Yield(value)) => {
+ if let Some(ref last) = self.back_last {
+ if value.cmp_values(last) != Ordering::Less {
+ self.fwd_stack.clear();
+ self.back_stack.clear();
+ self.remaining = 0;
+ return None;
+ }
+ }
+ self.remaining -= 1;
+ self.fwd_last = Some(value.clone());
+ return Some(value);
+ }
+ }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining, Some(self.remaining))
+ }
+}
+
+impl<A> DoubleEndedIterator for ConsumingIter<A>
+where
+ A: BTreeValue + Clone,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ loop {
+ match self.back_stack.pop() {
+ None => {
+ self.remaining = 0;
+ return None;
+ }
+ Some(ConsumingIterItem::Consider(node)) => self.push_back(node),
+ Some(ConsumingIterItem::Yield(value)) => {
+ if let Some(ref last) = self.fwd_last {
+ if value.cmp_values(last) != Ordering::Greater {
+ self.fwd_stack.clear();
+ self.back_stack.clear();
+ self.remaining = 0;
+ return None;
+ }
+ }
+ self.remaining -= 1;
+ self.back_last = Some(value.clone());
+ return Some(value);
+ }
+ }
+ }
+ }
+}
+
+impl<A: BTreeValue + Clone> ExactSizeIterator for ConsumingIter<A> {}
+
+// DiffIter
+
+/// An iterator over the differences between two ordered sets.
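+///
+/// Walks both trees in order, skipping over any subtrees that are physically
+/// shared between the two sets, so the amount of work tends to be proportional
+/// to the size of the difference rather than the size of the sets.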
+pub struct DiffIter<'a, A> {
+ old_stack: Vec<IterItem<'a, A>>,
+ new_stack: Vec<IterItem<'a, A>>,
+}
+
+/// A description of a difference between two ordered sets.
+#[derive(PartialEq, Eq, Debug)]
+pub enum DiffItem<'a, A> {
+ /// This value has been added to the new set.
+ Add(&'a A),
+ /// This value has been changed between the two sets.
+ Update {
+ /// The old value.
+ old: &'a A,
+ /// The new value.
+ new: &'a A,
+ },
+ /// This value has been removed from the new set.
+ Remove(&'a A),
+}
+
+enum IterItem<'a, A> {
+ Consider(&'a Node<A>),
+ Yield(&'a A),
+}
+
+impl<'a, A: 'a> DiffIter<'a, A> {
+ pub(crate) fn new(old: &'a Node<A>, new: &'a Node<A>) -> Self {
+ DiffIter {
+ old_stack: if old.keys.is_empty() {
+ Vec::new()
+ } else {
+ vec![IterItem::Consider(old)]
+ },
+ new_stack: if new.keys.is_empty() {
+ Vec::new()
+ } else {
+ vec![IterItem::Consider(new)]
+ },
+ }
+ }
+
+ fn push_node(stack: &mut Vec<IterItem<'a, A>>, maybe_node: &'a Option<PoolRef<Node<A>>>) {
+ if let Some(ref node) = *maybe_node {
+ stack.push(IterItem::Consider(node))
+ }
+ }
+
+ fn push(stack: &mut Vec<IterItem<'a, A>>, node: &'a Node<A>) {
+ for n in 0..node.keys.len() {
+ let i = node.keys.len() - n;
+ Self::push_node(stack, &node.children[i]);
+ stack.push(IterItem::Yield(&node.keys[i - 1]));
+ }
+ Self::push_node(stack, &node.children[0]);
+ }
+}
+
+impl<'a, A> Iterator for DiffIter<'a, A>
+where
+ A: 'a + BTreeValue + PartialEq,
+{
+ type Item = DiffItem<'a, A>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ loop {
+ match (self.old_stack.pop(), self.new_stack.pop()) {
+ (None, None) => return None,
+ (None, Some(new)) => match new {
+ IterItem::Consider(new) => Self::push(&mut self.new_stack, new),
+ IterItem::Yield(new) => return Some(DiffItem::Add(new)),
+ },
+ (Some(old), None) => match old {
+ IterItem::Consider(old) => Self::push(&mut self.old_stack, old),
+ IterItem::Yield(old) => return Some(DiffItem::Remove(old)),
+ },
+ (Some(old), Some(new)) => match (old, new) {
+ (IterItem::Consider(old), IterItem::Consider(new)) => {
+ if !std::ptr::eq(old, new) {
+ match old.keys[0].cmp_values(&new.keys[0]) {
+ Ordering::Less => {
+ Self::push(&mut self.old_stack, old);
+ self.new_stack.push(IterItem::Consider(new));
+ }
+ Ordering::Greater => {
+ self.old_stack.push(IterItem::Consider(old));
+ Self::push(&mut self.new_stack, new);
+ }
+ Ordering::Equal => {
+ Self::push(&mut self.old_stack, old);
+ Self::push(&mut self.new_stack, new);
+ }
+ }
+ }
+ }
+ (IterItem::Consider(old), IterItem::Yield(new)) => {
+ Self::push(&mut self.old_stack, old);
+ self.new_stack.push(IterItem::Yield(new));
+ }
+ (IterItem::Yield(old), IterItem::Consider(new)) => {
+ self.old_stack.push(IterItem::Yield(old));
+ Self::push(&mut self.new_stack, new);
+ }
+ (IterItem::Yield(old), IterItem::Yield(new)) => match old.cmp_values(new) {
+ Ordering::Less => {
+ self.new_stack.push(IterItem::Yield(new));
+ return Some(DiffItem::Remove(old));
+ }
+ Ordering::Equal => {
+ if old != new {
+ return Some(DiffItem::Update { old, new });
+ }
+ }
+ Ordering::Greater => {
+ self.old_stack.push(IterItem::Yield(old));
+ return Some(DiffItem::Add(new));
+ }
+ },
+ },
+ }
+ }
+ }
+}
diff --git a/vendor/im-rc/src/nodes/hamt.rs b/vendor/im-rc/src/nodes/hamt.rs
new file mode 100644
index 000000000..945068be3
--- /dev/null
+++ b/vendor/im-rc/src/nodes/hamt.rs
@@ -0,0 +1,726 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use std::borrow::Borrow;
+use std::fmt;
+use std::hash::{BuildHasher, Hash, Hasher};
+use std::iter::FusedIterator;
+use std::slice::{Iter as SliceIter, IterMut as SliceIterMut};
+use std::{mem, ptr};
+
+use bitmaps::Bits;
+use sized_chunks::sparse_chunk::{Iter as ChunkIter, IterMut as ChunkIterMut, SparseChunk};
+use typenum::{Pow, Unsigned, U2};
+
+use crate::config::HashLevelSize;
+use crate::util::{clone_ref, Pool, PoolClone, PoolDefault, PoolRef, Ref};
+
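+// The trie's branching parameters, all derived from `HashLevelSize` in the
+// crate config: e.g. a level size of 5 bits gives 32-way branching, with each
+// level of the trie indexed by a 5-bit slice of the key's hash.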
+pub(crate) type HashWidth = <U2 as Pow<HashLevelSize>>::Output;
+pub(crate) type HashBits = <HashWidth as Bits>::Store; // an unsigned integer of HASH_WIDTH bits
+pub(crate) const HASH_SHIFT: usize = HashLevelSize::USIZE;
+pub(crate) const HASH_WIDTH: usize = HashWidth::USIZE;
+pub(crate) const HASH_MASK: HashBits = (HASH_WIDTH - 1) as HashBits;
+
+pub(crate) fn hash_key<K: Hash + ?Sized, S: BuildHasher>(bh: &S, key: &K) -> HashBits {
+ let mut hasher = bh.build_hasher();
+ key.hash(&mut hasher);
+ hasher.finish() as HashBits
+}
+
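+// Extract the HASH_SHIFT-bit slice of the hash used to index into a node at
+// the given depth, where `shift` is the number of hash bits already consumed.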
+#[inline]
+fn mask(hash: HashBits, shift: usize) -> HashBits {
+ hash >> shift & HASH_MASK
+}
+
+pub trait HashValue {
+ type Key: Eq;
+
+ fn extract_key(&self) -> &Self::Key;
+ fn ptr_eq(&self, other: &Self) -> bool;
+}
+
+#[derive(Clone)]
+pub(crate) struct Node<A> {
+ data: SparseChunk<Entry<A>, HashWidth>,
+}
+
+#[allow(unsafe_code)]
+impl<A> PoolDefault for Node<A> {
+ #[cfg(feature = "pool")]
+ unsafe fn default_uninit(target: &mut mem::MaybeUninit<Self>) {
+ SparseChunk::default_uninit(
+ target
+ .as_mut_ptr()
+ .cast::<mem::MaybeUninit<SparseChunk<Entry<A>, HashWidth>>>()
+ .as_mut()
+ .unwrap(),
+ )
+ }
+}
+
+#[allow(unsafe_code)]
+impl<A> PoolClone for Node<A>
+where
+ A: Clone,
+{
+ #[cfg(feature = "pool")]
+ unsafe fn clone_uninit(&self, target: &mut mem::MaybeUninit<Self>) {
+ self.data.clone_uninit(
+ target
+ .as_mut_ptr()
+ .cast::<mem::MaybeUninit<SparseChunk<Entry<A>, HashWidth>>>()
+ .as_mut()
+ .unwrap(),
+ )
+ }
+}
+
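+// A bucket of entries whose keys hash to exactly the same bits, stored as a
+// plain `Vec` and searched linearly.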
+#[derive(Clone)]
+pub(crate) struct CollisionNode<A> {
+ hash: HashBits,
+ data: Vec<A>,
+}
+
+pub(crate) enum Entry<A> {
+ Value(A, HashBits),
+ Collision(Ref<CollisionNode<A>>),
+ Node(PoolRef<Node<A>>),
+}
+
+impl<A: Clone> Clone for Entry<A> {
+ fn clone(&self) -> Self {
+ match self {
+ Entry::Value(value, hash) => Entry::Value(value.clone(), *hash),
+ Entry::Collision(coll) => Entry::Collision(coll.clone()),
+ Entry::Node(node) => Entry::Node(node.clone()),
+ }
+ }
+}
+
+impl<A> Entry<A> {
+ fn is_value(&self) -> bool {
+ matches!(self, Entry::Value(_, _))
+ }
+
+ fn unwrap_value(self) -> A {
+ match self {
+ Entry::Value(a, _) => a,
+ _ => panic!("nodes::hamt::Entry::unwrap_value: unwrapped a non-value"),
+ }
+ }
+
+ fn from_node(pool: &Pool<Node<A>>, node: Node<A>) -> Self {
+ Entry::Node(PoolRef::new(pool, node))
+ }
+}
+
+impl<A> From<CollisionNode<A>> for Entry<A> {
+ fn from(node: CollisionNode<A>) -> Self {
+ Entry::Collision(Ref::new(node))
+ }
+}
+
+impl<A> Default for Node<A> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<A> Node<A> {
+ #[inline]
+ pub(crate) fn new() -> Self {
+ Node {
+ data: SparseChunk::new(),
+ }
+ }
+
+ #[inline]
+ fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[inline]
+ pub(crate) fn unit(index: usize, value: Entry<A>) -> Self {
+ Node {
+ data: SparseChunk::unit(index, value),
+ }
+ }
+
+ #[inline]
+ pub(crate) fn pair(index1: usize, value1: Entry<A>, index2: usize, value2: Entry<A>) -> Self {
+ Node {
+ data: SparseChunk::pair(index1, value1, index2, value2),
+ }
+ }
+
+ #[inline]
+ pub(crate) fn single_child(pool: &Pool<Node<A>>, index: usize, node: Self) -> Self {
+ Node {
+ data: SparseChunk::unit(index, Entry::from_node(pool, node)),
+ }
+ }
+
+ fn pop(&mut self) -> Entry<A> {
+ self.data.pop().unwrap()
+ }
+}
+
+impl<A: HashValue> Node<A> {
+ fn merge_values(
+ pool: &Pool<Node<A>>,
+ value1: A,
+ hash1: HashBits,
+ value2: A,
+ hash2: HashBits,
+ shift: usize,
+ ) -> Self {
+ let index1 = mask(hash1, shift) as usize;
+ let index2 = mask(hash2, shift) as usize;
+ if index1 != index2 {
+ // Both values fit on the same level.
+ Node::pair(
+ index1,
+ Entry::Value(value1, hash1),
+ index2,
+ Entry::Value(value2, hash2),
+ )
+ } else if shift + HASH_SHIFT >= HASH_WIDTH {
+ // If we're at the bottom, we've got a collision.
+ Node::unit(
+ index1,
+ Entry::from(CollisionNode::new(hash1, value1, value2)),
+ )
+ } else {
+ // Pass the values down a level.
+ let node = Node::merge_values(pool, value1, hash1, value2, hash2, shift + HASH_SHIFT);
+ Node::single_child(pool, index1, node)
+ }
+ }
+
+ pub(crate) fn get<BK>(&self, hash: HashBits, shift: usize, key: &BK) -> Option<&A>
+ where
+ BK: Eq + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let index = mask(hash, shift) as usize;
+ if let Some(entry) = self.data.get(index) {
+ match entry {
+ Entry::Value(ref value, _) => {
+ if key == value.extract_key().borrow() {
+ Some(value)
+ } else {
+ None
+ }
+ }
+ Entry::Collision(ref coll) => coll.get(key),
+ Entry::Node(ref child) => child.get(hash, shift + HASH_SHIFT, key),
+ }
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn get_mut<BK>(
+ &mut self,
+ pool: &Pool<Node<A>>,
+ hash: HashBits,
+ shift: usize,
+ key: &BK,
+ ) -> Option<&mut A>
+ where
+ A: Clone,
+ BK: Eq + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let index = mask(hash, shift) as usize;
+ if let Some(entry) = self.data.get_mut(index) {
+ match entry {
+ Entry::Value(ref mut value, _) => {
+ if key == value.extract_key().borrow() {
+ Some(value)
+ } else {
+ None
+ }
+ }
+ Entry::Collision(ref mut coll_ref) => {
+ let coll = Ref::make_mut(coll_ref);
+ coll.get_mut(key)
+ }
+ Entry::Node(ref mut child_ref) => {
+ let child = PoolRef::make_mut(pool, child_ref);
+ child.get_mut(pool, hash, shift + HASH_SHIFT, key)
+ }
+ }
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn insert(
+ &mut self,
+ pool: &Pool<Node<A>>,
+ hash: HashBits,
+ shift: usize,
+ value: A,
+ ) -> Option<A>
+ where
+ A: Clone,
+ {
+ let index = mask(hash, shift) as usize;
+ if let Some(entry) = self.data.get_mut(index) {
+ let mut fallthrough = false;
+ // Value is here
+ match entry {
+ // Update value or create a subtree
+ Entry::Value(ref current, _) => {
+ if current.extract_key() == value.extract_key() {
+ // If we have a key match, fall through to the outer
+ // level where we replace the current value. If we
+ // don't, fall through to the inner level where we merge
+ // some nodes.
+ fallthrough = true;
+ }
+ }
+ // There's already a collision here.
+ Entry::Collision(ref mut collision) => {
+ let coll = Ref::make_mut(collision);
+ return coll.insert(value);
+ }
+ Entry::Node(ref mut child_ref) => {
+ // Child node
+ let child = PoolRef::make_mut(pool, child_ref);
+ return child.insert(pool, hash, shift + HASH_SHIFT, value);
+ }
+ }
+ if !fallthrough {
+ // If we get here, we're looking at a value entry that needs a merge.
+ // We're going to be unsafe and pry it out of the reference, trusting
+ // that we overwrite it with the merged node.
+ #[allow(unsafe_code)]
+ let old_entry = unsafe { ptr::read(entry) };
+ if shift + HASH_SHIFT >= HASH_WIDTH {
+ // We're at the lowest level, need to set up a collision node.
+ let coll = CollisionNode::new(hash, old_entry.unwrap_value(), value);
+ #[allow(unsafe_code)]
+ unsafe {
+ ptr::write(entry, Entry::from(coll))
+ };
+ } else if let Entry::Value(old_value, old_hash) = old_entry {
+ let node = Node::merge_values(
+ pool,
+ old_value,
+ old_hash,
+ value,
+ hash,
+ shift + HASH_SHIFT,
+ );
+ #[allow(unsafe_code)]
+ unsafe {
+ ptr::write(entry, Entry::from_node(pool, node))
+ };
+ } else {
+ unreachable!()
+ }
+ return None;
+ }
+ }
+ // If we get here, either we found nothing at this index, in which case
+ // we insert a new entry, or we hit a value entry with the same key, in
+ // which case we replace it.
+ self.data
+ .insert(index, Entry::Value(value, hash))
+ .map(Entry::unwrap_value)
+ }
+
+ pub(crate) fn remove<BK>(
+ &mut self,
+ pool: &Pool<Node<A>>,
+ hash: HashBits,
+ shift: usize,
+ key: &BK,
+ ) -> Option<A>
+ where
+ A: Clone,
+ BK: Eq + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let index = mask(hash, shift) as usize;
+ let mut new_node = None;
+ let mut removed = None;
+ if let Some(entry) = self.data.get_mut(index) {
+ match entry {
+ Entry::Value(ref value, _) => {
+ if key != value.extract_key().borrow() {
+ // Key wasn't in the map.
+ return None;
+ } // Otherwise, fall through to the removal.
+ }
+ Entry::Collision(ref mut coll_ref) => {
+ let coll = Ref::make_mut(coll_ref);
+ removed = coll.remove(key);
+ if coll.len() == 1 {
+ new_node = Some(coll.pop());
+ } else {
+ return removed;
+ }
+ }
+ Entry::Node(ref mut child_ref) => {
+ let child = PoolRef::make_mut(pool, child_ref);
+ match child.remove(pool, hash, shift + HASH_SHIFT, key) {
+ None => {
+ return None;
+ }
+ Some(value) => {
+ if child.len() == 1
+ && child.data[child.data.first_index().unwrap()].is_value()
+ {
+ // If the child now contains only a single value node,
+ // pull it up one level and discard the child.
+ removed = Some(value);
+ new_node = Some(child.pop());
+ } else {
+ return Some(value);
+ }
+ }
+ }
+ }
+ }
+ }
+ if let Some(node) = new_node {
+ self.data.insert(index, node);
+ return removed;
+ }
+ self.data.remove(index).map(Entry::unwrap_value)
+ }
+}
+
+impl<A: HashValue> CollisionNode<A> {
+ fn new(hash: HashBits, value1: A, value2: A) -> Self {
+ CollisionNode {
+ hash,
+ data: vec![value1, value2],
+ }
+ }
+
+ #[inline]
+ fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ fn get<BK>(&self, key: &BK) -> Option<&A>
+ where
+ BK: Eq + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ for entry in &self.data {
+ if key == entry.extract_key().borrow() {
+ return Some(entry);
+ }
+ }
+ None
+ }
+
+ fn get_mut<BK>(&mut self, key: &BK) -> Option<&mut A>
+ where
+ BK: Eq + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ for entry in &mut self.data {
+ if key == entry.extract_key().borrow() {
+ return Some(entry);
+ }
+ }
+ None
+ }
+
+ fn insert(&mut self, value: A) -> Option<A> {
+ for item in &mut self.data {
+ if value.extract_key() == item.extract_key() {
+ return Some(mem::replace(item, value));
+ }
+ }
+ self.data.push(value);
+ None
+ }
+
+ fn remove<BK>(&mut self, key: &BK) -> Option<A>
+ where
+ BK: Eq + ?Sized,
+ A::Key: Borrow<BK>,
+ {
+ let mut loc = None;
+ for (index, item) in self.data.iter().enumerate() {
+ if key == item.extract_key().borrow() {
+ loc = Some(index);
+ }
+ }
+ if let Some(index) = loc {
+ Some(self.data.remove(index))
+ } else {
+ None
+ }
+ }
+
+ fn pop(&mut self) -> Entry<A> {
+ Entry::Value(self.data.pop().unwrap(), self.hash)
+ }
+}
+
+// Ref iterator
+
+pub(crate) struct Iter<'a, A> {
+ count: usize,
+ stack: Vec<ChunkIter<'a, Entry<A>, HashWidth>>,
+ current: ChunkIter<'a, Entry<A>, HashWidth>,
+ collision: Option<(HashBits, SliceIter<'a, A>)>,
+}
+
+impl<'a, A> Iter<'a, A>
+where
+ A: 'a,
+{
+ pub(crate) fn new(root: &'a Node<A>, size: usize) -> Self {
+ Iter {
+ count: size,
+ stack: Vec::with_capacity((HASH_WIDTH / HASH_SHIFT) + 1),
+ current: root.data.iter(),
+ collision: None,
+ }
+ }
+}
+
+impl<'a, A> Iterator for Iter<'a, A>
+where
+ A: 'a,
+{
+ type Item = (&'a A, HashBits);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.count == 0 {
+ return None;
+ }
+ if self.collision.is_some() {
+ if let Some((hash, ref mut coll)) = self.collision {
+ match coll.next() {
+ None => {}
+ Some(value) => {
+ self.count -= 1;
+ return Some((value, hash));
+ }
+ }
+ }
+ self.collision = None;
+ return self.next();
+ }
+ match self.current.next() {
+ Some(Entry::Value(value, hash)) => {
+ self.count -= 1;
+ Some((value, *hash))
+ }
+ Some(Entry::Node(child)) => {
+ let current = mem::replace(&mut self.current, child.data.iter());
+ self.stack.push(current);
+ self.next()
+ }
+ Some(Entry::Collision(coll)) => {
+ self.collision = Some((coll.hash, coll.data.iter()));
+ self.next()
+ }
+ None => match self.stack.pop() {
+ None => None,
+ Some(iter) => {
+ self.current = iter;
+ self.next()
+ }
+ },
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.count, Some(self.count))
+ }
+}
+
+impl<'a, A> ExactSizeIterator for Iter<'a, A> where A: 'a {}
+
+impl<'a, A> FusedIterator for Iter<'a, A> where A: 'a {}
+
+// Mut ref iterator
+
+pub(crate) struct IterMut<'a, A> {
+ count: usize,
+ pool: Pool<Node<A>>,
+ stack: Vec<ChunkIterMut<'a, Entry<A>, HashWidth>>,
+ current: ChunkIterMut<'a, Entry<A>, HashWidth>,
+ collision: Option<(HashBits, SliceIterMut<'a, A>)>,
+}
+
+impl<'a, A> IterMut<'a, A>
+where
+ A: 'a,
+{
+ pub(crate) fn new(pool: &Pool<Node<A>>, root: &'a mut Node<A>, size: usize) -> Self {
+ IterMut {
+ count: size,
+ pool: pool.clone(),
+ stack: Vec::with_capacity((HASH_WIDTH / HASH_SHIFT) + 1),
+ current: root.data.iter_mut(),
+ collision: None,
+ }
+ }
+}
+
+impl<'a, A> Iterator for IterMut<'a, A>
+where
+ A: Clone + 'a,
+{
+ type Item = (&'a mut A, HashBits);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.count == 0 {
+ return None;
+ }
+ if self.collision.is_some() {
+ if let Some((hash, ref mut coll)) = self.collision {
+ match coll.next() {
+ None => {}
+ Some(value) => {
+ self.count -= 1;
+ return Some((value, hash));
+ }
+ }
+ }
+ self.collision = None;
+ return self.next();
+ }
+ match self.current.next() {
+ Some(Entry::Value(value, hash)) => {
+ self.count -= 1;
+ Some((value, *hash))
+ }
+ Some(Entry::Node(child_ref)) => {
+ let child = PoolRef::make_mut(&self.pool, child_ref);
+ let current = mem::replace(&mut self.current, child.data.iter_mut());
+ self.stack.push(current);
+ self.next()
+ }
+ Some(Entry::Collision(coll_ref)) => {
+ let coll = Ref::make_mut(coll_ref);
+ self.collision = Some((coll.hash, coll.data.iter_mut()));
+ self.next()
+ }
+ None => match self.stack.pop() {
+ None => None,
+ Some(iter) => {
+ self.current = iter;
+ self.next()
+ }
+ },
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.count, Some(self.count))
+ }
+}
+
+impl<'a, A> ExactSizeIterator for IterMut<'a, A> where A: Clone + 'a {}
+
+impl<'a, A> FusedIterator for IterMut<'a, A> where A: Clone + 'a {}
+
+// Consuming iterator
+
+pub(crate) struct Drain<A>
+where
+ A: HashValue,
+{
+ count: usize,
+ pool: Pool<Node<A>>,
+ stack: Vec<PoolRef<Node<A>>>,
+ current: PoolRef<Node<A>>,
+ collision: Option<CollisionNode<A>>,
+}
+
+impl<A> Drain<A>
+where
+ A: HashValue,
+{
+ pub(crate) fn new(pool: &Pool<Node<A>>, root: PoolRef<Node<A>>, size: usize) -> Self {
+ Drain {
+ count: size,
+ pool: pool.clone(),
+ stack: vec![],
+ current: root,
+ collision: None,
+ }
+ }
+}
+
+impl<A> Iterator for Drain<A>
+where
+ A: HashValue + Clone,
+{
+ type Item = (A, HashBits);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.count == 0 {
+ return None;
+ }
+ if self.collision.is_some() {
+ if let Some(ref mut coll) = self.collision {
+ if let Some(value) = coll.data.pop() {
+ self.count -= 1;
+ return Some((value, coll.hash));
+ }
+ }
+ self.collision = None;
+ return self.next();
+ }
+ match PoolRef::make_mut(&self.pool, &mut self.current).data.pop() {
+ Some(Entry::Value(value, hash)) => {
+ self.count -= 1;
+ Some((value, hash))
+ }
+ Some(Entry::Collision(coll_ref)) => {
+ self.collision = Some(clone_ref(coll_ref));
+ self.next()
+ }
+ Some(Entry::Node(child)) => {
+ let parent = mem::replace(&mut self.current, child);
+ self.stack.push(parent);
+ self.next()
+ }
+ None => match self.stack.pop() {
+ None => None,
+ Some(parent) => {
+ self.current = parent;
+ self.next()
+ }
+ },
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.count, Some(self.count))
+ }
+}
+
+impl<A: HashValue> ExactSizeIterator for Drain<A> where A: Clone {}
+
+impl<A: HashValue> FusedIterator for Drain<A> where A: Clone {}
+
+impl<A: HashValue + fmt::Debug> fmt::Debug for Node<A> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
+ write!(f, "Node[ ")?;
+ for i in self.data.indices() {
+ write!(f, "{}: ", i)?;
+ match &self.data[i] {
+ Entry::Value(v, h) => write!(f, "{:?} :: {}, ", v, h)?,
+ Entry::Collision(c) => write!(f, "Coll{:?} :: {}", c.data, c.hash)?,
+ Entry::Node(n) => write!(f, "{:?}, ", n)?,
+ }
+ }
+ write!(f, " ]")
+ }
+}
diff --git a/vendor/im-rc/src/nodes/mod.rs b/vendor/im-rc/src/nodes/mod.rs
new file mode 100644
index 000000000..24e745504
--- /dev/null
+++ b/vendor/im-rc/src/nodes/mod.rs
@@ -0,0 +1,16 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+pub(crate) mod btree;
+pub(crate) mod hamt;
+pub(crate) mod rrb;
+
+pub(crate) mod chunk {
+ use crate::config::VectorChunkSize;
+ use sized_chunks as sc;
+ use typenum::Unsigned;
+
+ pub(crate) type Chunk<A> = sc::sized_chunk::Chunk<A, VectorChunkSize>;
+ pub(crate) const CHUNK_SIZE: usize = VectorChunkSize::USIZE;
+}
diff --git a/vendor/im-rc/src/nodes/rrb.rs b/vendor/im-rc/src/nodes/rrb.rs
new file mode 100644
index 000000000..8809b84b8
--- /dev/null
+++ b/vendor/im-rc/src/nodes/rrb.rs
@@ -0,0 +1,1101 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use std::mem::replace;
+use std::ops::Range;
+
+use crate::nodes::chunk::{Chunk, CHUNK_SIZE};
+use crate::util::{
+ Pool, PoolRef,
+ Side::{self, Left, Right},
+};
+use crate::vector::RRBPool;
+
+use self::Entry::*;
+
+pub(crate) const NODE_SIZE: usize = CHUNK_SIZE;
+
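+/// Size tracking for a branch node. `Size::Size(n)` holds the total length
+/// of a subtree whose children are densely packed (only the last child may
+/// be partially full), so child offsets can be computed from the level
+/// alone. `Size::Table` stores cumulative child lengths for relaxed nodes,
+/// with the last entry being the subtree's total length.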
+#[derive(Debug)]
+enum Size {
+ Size(usize),
+ Table(PoolRef<Chunk<usize>>),
+}
+
+impl Clone for Size {
+ fn clone(&self) -> Self {
+ match *self {
+ Size::Size(size) => Size::Size(size),
+ Size::Table(ref table) => Size::Table(table.clone()),
+ }
+ }
+}
+
+impl Size {
+ fn size(&self) -> usize {
+ match self {
+ Size::Size(s) => *s,
+ Size::Table(sizes) => *sizes.last().unwrap_or(&0),
+ }
+ }
+
+ fn is_size(&self) -> bool {
+ match self {
+ Size::Size(_) => true,
+ Size::Table(_) => false,
+ }
+ }
+
+ fn table_from_size(pool: &Pool<Chunk<usize>>, level: usize, size: usize) -> Self {
+ let mut chunk = Chunk::new();
+ let mut remaining = size;
+ if let Some(child_size) = NODE_SIZE.checked_pow(level as u32) {
+ while remaining > child_size {
+ let next_value = chunk.last().unwrap_or(&0) + child_size;
+ chunk.push_back(next_value);
+ remaining -= child_size;
+ }
+ }
+ if remaining > 0 {
+ let next_value = chunk.last().unwrap_or(&0) + remaining;
+ chunk.push_back(next_value);
+ }
+ Size::Table(PoolRef::new(pool, chunk))
+ }
+
+ fn push(&mut self, pool: &Pool<Chunk<usize>>, side: Side, level: usize, value: usize) {
+ let size = match self {
+ Size::Size(ref mut size) => match side {
+ Left => *size,
+ Right => {
+ *size += value;
+ return;
+ }
+ },
+ Size::Table(ref mut size_ref) => {
+ let size_table = PoolRef::make_mut(pool, size_ref);
+ debug_assert!(size_table.len() < NODE_SIZE);
+ match side {
+ Left => {
+ for entry in size_table.iter_mut() {
+ *entry += value;
+ }
+ size_table.push_front(value);
+ }
+ Right => {
+ let prev = *(size_table.last().unwrap_or(&0));
+ size_table.push_back(value + prev);
+ }
+ }
+ return;
+ }
+ };
+ *self = Size::table_from_size(pool, level, size);
+ self.push(pool, side, level, value);
+ }
+
+ fn pop(&mut self, pool: &Pool<Chunk<usize>>, side: Side, level: usize, value: usize) {
+ let size = match self {
+ Size::Size(ref mut size) => match side {
+ Left => *size,
+ Right => {
+ *size -= value;
+ return;
+ }
+ },
+ Size::Table(ref mut size_ref) => {
+ let size_table = PoolRef::make_mut(pool, size_ref);
+ match side {
+ Left => {
+ let first = size_table.pop_front();
+ debug_assert_eq!(value, first);
+ for entry in size_table.iter_mut() {
+ *entry -= value;
+ }
+ }
+ Right => {
+ let pop = size_table.pop_back();
+ let last = size_table.last().unwrap_or(&0);
+ debug_assert_eq!(value, pop - last);
+ }
+ }
+ return;
+ }
+ };
+ *self = Size::table_from_size(pool, level, size);
+ self.pop(pool, side, level, value);
+ }
+
+ fn update(&mut self, pool: &Pool<Chunk<usize>>, index: usize, level: usize, value: isize) {
+ let size = match self {
+ Size::Size(ref size) => *size,
+ Size::Table(ref mut size_ref) => {
+ let size_table = PoolRef::make_mut(pool, size_ref);
+ for entry in size_table.iter_mut().skip(index) {
+ *entry = (*entry as isize + value) as usize;
+ }
+ return;
+ }
+ };
+ *self = Size::table_from_size(pool, level, size);
+ self.update(pool, index, level, value);
+ }
+}
+
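+// Results of chunk-level operations on a node: `PushResult::Full` hands the
+// chunk back when it cannot be pushed in full, together with a count of
+// elements drained from it that callers use for size bookkeeping;
+// `PopResult::Drained` means the pop emptied the node; and
+// `SplitResult::Dropped` reports how many elements were removed.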
+pub(crate) enum PushResult<A> {
+ Full(A, usize),
+ Done,
+}
+
+pub(crate) enum PopResult<A> {
+ Done(A),
+ Drained(A),
+ Empty,
+}
+
+pub(crate) enum SplitResult {
+ Dropped(usize),
+ OutOfBounds,
+}
+
+// Invariants: Nodes only at level > 0, Values/Empty only at level = 0
+enum Entry<A> {
+ Nodes(Size, PoolRef<Chunk<Node<A>>>),
+ Values(PoolRef<Chunk<A>>),
+ Empty,
+}
+
+impl<A: Clone> Clone for Entry<A> {
+ fn clone(&self) -> Self {
+ match *self {
+ Nodes(ref size, ref nodes) => Nodes(size.clone(), nodes.clone()),
+ Values(ref values) => Values(values.clone()),
+ Empty => Empty,
+ }
+ }
+}
+
+impl<A: Clone> Entry<A> {
+ fn len(&self) -> usize {
+ match self {
+ Nodes(_, ref nodes) => nodes.len(),
+ Values(ref values) => values.len(),
+ Empty => 0,
+ }
+ }
+
+ fn is_full(&self) -> bool {
+ match self {
+ Nodes(_, ref nodes) => nodes.is_full(),
+ Values(ref values) => values.is_full(),
+ Empty => false,
+ }
+ }
+
+ fn unwrap_values(&self) -> &Chunk<A> {
+ match self {
+ Values(ref values) => values,
+ _ => panic!("rrb::Entry::unwrap_values: expected values, found nodes"),
+ }
+ }
+
+ fn unwrap_nodes(&self) -> &Chunk<Node<A>> {
+ match self {
+ Nodes(_, ref nodes) => nodes,
+ _ => panic!("rrb::Entry::unwrap_nodes: expected nodes, found values"),
+ }
+ }
+
+ fn unwrap_values_mut(&mut self, pool: &RRBPool<A>) -> &mut Chunk<A> {
+ match self {
+ Values(ref mut values) => PoolRef::make_mut(&pool.value_pool, values),
+ _ => panic!("rrb::Entry::unwrap_values_mut: expected values, found nodes"),
+ }
+ }
+
+ fn unwrap_nodes_mut(&mut self, pool: &RRBPool<A>) -> &mut Chunk<Node<A>> {
+ match self {
+ Nodes(_, ref mut nodes) => PoolRef::make_mut(&pool.node_pool, nodes),
+ _ => panic!("rrb::Entry::unwrap_nodes_mut: expected nodes, found values"),
+ }
+ }
+
+ fn values(self) -> Chunk<A> {
+ match self {
+ Values(values) => PoolRef::unwrap_or_clone(values),
+ _ => panic!("rrb::Entry::values: expected values, found nodes"),
+ }
+ }
+
+ fn nodes(self) -> Chunk<Node<A>> {
+ match self {
+ Nodes(_, nodes) => PoolRef::unwrap_or_clone(nodes),
+ _ => panic!("rrb::Entry::nodes: expected nodes, found values"),
+ }
+ }
+
+ fn is_empty_node(&self) -> bool {
+ matches!(self, Empty)
+ }
+}
+
+// Node
+
+pub(crate) struct Node<A> {
+ children: Entry<A>,
+}
+
+impl<A: Clone> Clone for Node<A> {
+ fn clone(&self) -> Self {
+ Node {
+ children: self.children.clone(),
+ }
+ }
+}
+
+impl<A: Clone> Default for Node<A> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<A: Clone> Node<A> {
+ pub(crate) fn new() -> Self {
+ Node { children: Empty }
+ }
+
+ pub(crate) fn parent(pool: &RRBPool<A>, level: usize, children: Chunk<Self>) -> Self {
+ let size = {
+ let mut size = Size::Size(0);
+ let mut it = children.iter().peekable();
+ loop {
+ match it.next() {
+ None => break,
+ Some(child) => {
+ if size.is_size()
+ && !child.is_completely_dense(level - 1)
+ && it.peek().is_some()
+ {
+ size = Size::table_from_size(&pool.size_pool, level, size.size());
+ }
+ size.push(&pool.size_pool, Right, level, child.len())
+ }
+ }
+ }
+ size
+ };
+ Node {
+ children: Nodes(size, PoolRef::new(&pool.node_pool, children)),
+ }
+ }
+
+ pub(crate) fn clear_node(&mut self) {
+ self.children = Empty;
+ }
+
+ pub(crate) fn from_chunk(pool: &RRBPool<A>, level: usize, chunk: PoolRef<Chunk<A>>) -> Self {
+ let node = Node {
+ children: Values(chunk),
+ };
+ node.elevate(pool, level)
+ }
+
+ pub(crate) fn single_parent(pool: &RRBPool<A>, node: Self) -> Self {
+ let size = if node.is_dense() {
+ Size::Size(node.len())
+ } else {
+ let size_table = Chunk::unit(node.len());
+ Size::Table(PoolRef::new(&pool.size_pool, size_table))
+ };
+ let children = PoolRef::new(&pool.node_pool, Chunk::unit(node));
+ Node {
+ children: Nodes(size, children),
+ }
+ }
+
+ pub(crate) fn join_dense(pool: &RRBPool<A>, left: Self, right: Self) -> Self {
+ let left_len = left.len();
+ let right_len = right.len();
+ Node {
+ children: {
+ let children = PoolRef::new(&pool.node_pool, Chunk::pair(left, right));
+ Nodes(Size::Size(left_len + right_len), children)
+ },
+ }
+ }
+
+ pub(crate) fn elevate(self, pool: &RRBPool<A>, level_increment: usize) -> Self {
+ if level_increment > 0 {
+ Self::single_parent(pool, self.elevate(pool, level_increment - 1))
+ } else {
+ self
+ }
+ }
+
+ pub(crate) fn join_branches(self, pool: &RRBPool<A>, right: Self, level: usize) -> Self {
+ let left_len = self.len();
+ let right_len = right.len();
+ let size = if self.is_completely_dense(level) && right.is_dense() {
+ Size::Size(left_len + right_len)
+ } else {
+ let size_table = Chunk::pair(left_len, left_len + right_len);
+ Size::Table(PoolRef::new(&pool.size_pool, size_table))
+ };
+ Node {
+ children: {
+ let children = Chunk::pair(self, right);
+ Nodes(size, PoolRef::new(&pool.node_pool, children))
+ },
+ }
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ match self.children {
+ Entry::Nodes(Size::Size(size), _) => size,
+ Entry::Nodes(Size::Table(ref size_table), _) => *(size_table.last().unwrap_or(&0)),
+ Entry::Values(ref values) => values.len(),
+ Entry::Empty => 0,
+ }
+ }
+
+ pub(crate) fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub(crate) fn is_single(&self) -> bool {
+ self.children.len() == 1
+ }
+
+ pub(crate) fn is_full(&self) -> bool {
+ self.children.is_full()
+ }
+
+ #[allow(dead_code)] // this is only used by tests
+ pub(crate) fn number_of_children(&self) -> usize {
+ self.children.len()
+ }
+
+ pub(crate) fn first_child(&self) -> &Self {
+ self.children.unwrap_nodes().first().unwrap()
+ }
+
+ /// True if the node is dense and so doesn't have a size table
+ fn is_dense(&self) -> bool {
+ !matches!(self.children, Entry::Nodes(Size::Table(_), _))
+ }
+
+ /// True if the node and its children are dense and at capacity
+ // TODO can use this technique to quickly test if a Size::Table
+ // should be converted back to a Size::Size
+ fn is_completely_dense(&self, level: usize) -> bool {
+ // Size of a full node is NODE_SIZE at level 0, NODE_SIZE² at
+ // level 1, etc.
+ if let Some(expected_size) = NODE_SIZE.checked_pow(level as u32 + 1) {
+ self.size() == expected_size
+ } else {
+ // We overflowed a usize, there's no way we can be completely dense as we know the size
+ // fits in a usize.
+ false
+ }
+ }
+
+ #[inline]
+ fn size(&self) -> usize {
+ match self.children {
+ Entry::Nodes(ref size, _) => size.size(),
+ Entry::Values(ref values) => values.len(),
+ Entry::Empty => 0,
+ }
+ }
+
+ #[inline]
+ fn push_size(&mut self, pool: &RRBPool<A>, side: Side, level: usize, value: usize) {
+ if let Entry::Nodes(ref mut size, _) = self.children {
+ size.push(&pool.size_pool, side, level, value)
+ }
+ }
+
+ #[inline]
+ fn pop_size(&mut self, pool: &RRBPool<A>, side: Side, level: usize, value: usize) {
+ if let Entry::Nodes(ref mut size, _) = self.children {
+ size.pop(&pool.size_pool, side, level, value)
+ }
+ }
+
+ #[inline]
+ fn update_size(&mut self, pool: &RRBPool<A>, index: usize, level: usize, value: isize) {
+ if let Entry::Nodes(ref mut size, _) = self.children {
+ size.update(&pool.size_pool, index, level, value)
+ }
+ }
+
+ fn size_up_to(&self, level: usize, index: usize) -> usize {
+ if let Entry::Nodes(ref size, _) = self.children {
+ if index == 0 {
+ 0
+ } else {
+ match size {
+ Size::Table(ref size_table) => size_table[index - 1],
+ Size::Size(_) => index * NODE_SIZE.pow(level as u32),
+ }
+ }
+ } else {
+ index
+ }
+ }
+
+ fn index_in(&self, level: usize, index: usize) -> Option<usize> {
+ let mut target_idx = if let Some(child_size) = NODE_SIZE.checked_pow(level as u32) {
+ index / child_size
+ } else {
+ 0
+ };
+ if target_idx >= self.children.len() {
+ return None;
+ }
+ if let Entry::Nodes(Size::Table(ref size_table), _) = self.children {
+ while size_table[target_idx] <= index {
+ target_idx += 1;
+ if target_idx >= size_table.len() {
+ return None;
+ }
+ }
+ }
+ Some(target_idx)
+ }
+
+ pub(crate) fn index(&self, level: usize, index: usize) -> &A {
+ if level == 0 {
+ &self.children.unwrap_values()[index]
+ } else {
+ let target_idx = self.index_in(level, index).unwrap();
+ self.children.unwrap_nodes()[target_idx]
+ .index(level - 1, index - self.size_up_to(level, target_idx))
+ }
+ }
+
+ pub(crate) fn index_mut(&mut self, pool: &RRBPool<A>, level: usize, index: usize) -> &mut A {
+ if level == 0 {
+ &mut self.children.unwrap_values_mut(pool)[index]
+ } else {
+ let target_idx = self.index_in(level, index).unwrap();
+ let offset = index - self.size_up_to(level, target_idx);
+ let child = &mut self.children.unwrap_nodes_mut(pool)[target_idx];
+ child.index_mut(pool, level - 1, offset)
+ }
+ }
+
+ pub(crate) fn lookup_chunk(
+ &self,
+ level: usize,
+ base: usize,
+ index: usize,
+ ) -> (Range<usize>, *const Chunk<A>) {
+ if level == 0 {
+ (
+ base..(base + self.children.len()),
+ self.children.unwrap_values() as *const Chunk<A>,
+ )
+ } else {
+ let target_idx = self.index_in(level, index).unwrap();
+ let offset = self.size_up_to(level, target_idx);
+ let child_base = base + offset;
+ let children = self.children.unwrap_nodes();
+ let child = &children[target_idx];
+ child.lookup_chunk(level - 1, child_base, index - offset)
+ }
+ }
+
+ pub(crate) fn lookup_chunk_mut(
+ &mut self,
+ pool: &RRBPool<A>,
+ level: usize,
+ base: usize,
+ index: usize,
+ ) -> (Range<usize>, *mut Chunk<A>) {
+ if level == 0 {
+ (
+ base..(base + self.children.len()),
+ self.children.unwrap_values_mut(pool) as *mut Chunk<A>,
+ )
+ } else {
+ let target_idx = self.index_in(level, index).unwrap();
+ let offset = self.size_up_to(level, target_idx);
+ let child_base = base + offset;
+ let children = self.children.unwrap_nodes_mut(pool);
+ let child = &mut children[target_idx];
+ child.lookup_chunk_mut(pool, level - 1, child_base, index - offset)
+ }
+ }
+
+ fn push_child_node(&mut self, pool: &RRBPool<A>, side: Side, child: Node<A>) {
+ let children = self.children.unwrap_nodes_mut(pool);
+ match side {
+ Left => children.push_front(child),
+ Right => children.push_back(child),
+ }
+ }
+
+ fn pop_child_node(&mut self, pool: &RRBPool<A>, side: Side) -> Node<A> {
+ let children = self.children.unwrap_nodes_mut(pool);
+ match side {
+ Left => children.pop_front(),
+ Right => children.pop_back(),
+ }
+ }
+
+ pub(crate) fn push_chunk(
+ &mut self,
+ pool: &RRBPool<A>,
+ level: usize,
+ side: Side,
+ mut chunk: PoolRef<Chunk<A>>,
+ ) -> PushResult<PoolRef<Chunk<A>>> {
+ if chunk.is_empty() {
+ return PushResult::Done;
+ }
+ let is_full = self.is_full();
+ if level == 0 {
+ if self.children.is_empty_node() {
+ self.push_size(pool, side, level, chunk.len());
+ self.children = Values(chunk);
+ PushResult::Done
+ } else {
+ let values = self.children.unwrap_values_mut(pool);
+ if values.len() + chunk.len() <= NODE_SIZE {
+ let chunk = PoolRef::make_mut(&pool.value_pool, &mut chunk);
+ match side {
+ Side::Left => {
+ chunk.append(values);
+ values.append(chunk);
+ }
+ Side::Right => values.append(chunk),
+ }
+ PushResult::Done
+ } else {
+ PushResult::Full(chunk, 0)
+ }
+ }
+ } else if level == 1 {
+ // If rightmost existing node has any room, merge as much as
+ // possible over from the new node.
+ let num_drained = match side {
+ Side::Right => {
+ if let Entry::Nodes(ref mut size, ref mut children) = self.children {
+ let rightmost = PoolRef::make_mut(&pool.node_pool, children)
+ .last_mut()
+ .unwrap();
+ let old_size = rightmost.len();
+ let chunk = PoolRef::make_mut(&pool.value_pool, &mut chunk);
+ let values = rightmost.children.unwrap_values_mut(pool);
+ let to_drain = chunk.len().min(NODE_SIZE - values.len());
+ values.drain_from_front(chunk, to_drain);
+ size.pop(&pool.size_pool, Side::Right, level, old_size);
+ size.push(&pool.size_pool, Side::Right, level, values.len());
+ to_drain
+ } else {
+ 0
+ }
+ }
+ Side::Left => {
+ if let Entry::Nodes(ref mut size, ref mut children) = self.children {
+ let leftmost = PoolRef::make_mut(&pool.node_pool, children)
+ .first_mut()
+ .unwrap();
+ let old_size = leftmost.len();
+ let chunk = PoolRef::make_mut(&pool.value_pool, &mut chunk);
+ let values = leftmost.children.unwrap_values_mut(pool);
+ let to_drain = chunk.len().min(NODE_SIZE - values.len());
+ values.drain_from_back(chunk, to_drain);
+ size.pop(&pool.size_pool, Side::Left, level, old_size);
+ size.push(&pool.size_pool, Side::Left, level, values.len());
+ to_drain
+ } else {
+ 0
+ }
+ }
+ };
+ if is_full {
+ PushResult::Full(chunk, num_drained)
+ } else {
+ // If the chunk is empty after being drained, there might be
+ // more space in existing chunks. To keep the middle dense, we
+ // do not add it here.
+ if !chunk.is_empty() {
+ if side == Left && chunk.len() < NODE_SIZE {
+ if let Entry::Nodes(ref mut size, _) = self.children {
+ if let Size::Size(value) = *size {
+ *size = Size::table_from_size(&pool.size_pool, level, value);
+ }
+ }
+ }
+ self.push_size(pool, side, level, chunk.len());
+ self.push_child_node(pool, side, Node::from_chunk(pool, 0, chunk));
+ }
+ PushResult::Done
+ }
+ } else {
+ let chunk_size = chunk.len();
+ let index = match side {
+ Right => self.children.len() - 1,
+ Left => 0,
+ };
+ let new_child = {
+ let children = self.children.unwrap_nodes_mut(pool);
+ let child = &mut children[index];
+ match child.push_chunk(pool, level - 1, side, chunk) {
+ PushResult::Done => None,
+ PushResult::Full(chunk, num_drained) => {
+ // Our chunk was too large for `child`, so it could not
+ // be pushed there. However, exactly `num_drained`
+ // elements were added to the child. We need to reflect
+ // that change in the size field of the node.
+ match side {
+ Right => match self.children {
+ Entry::Nodes(Size::Table(ref mut sizes), _) => {
+ let sizes = PoolRef::make_mut(&pool.size_pool, sizes);
+ sizes[index] += num_drained;
+ }
+ Entry::Nodes(Size::Size(ref mut size), _) => {
+ *size += num_drained;
+ }
+ Entry::Values(_) | Entry::Empty => (),
+ },
+ Left => {
+ self.update_size(pool, 0, level, num_drained as isize);
+ }
+ }
+ if is_full {
+ return PushResult::Full(chunk, 0);
+ } else {
+ Some(Node::from_chunk(pool, level - 1, chunk))
+ }
+ }
+ }
+ };
+ match new_child {
+ None => {
+ self.update_size(pool, index, level, chunk_size as isize);
+ PushResult::Done
+ }
+ Some(child) => {
+ if side == Left && chunk_size < NODE_SIZE {
+ if let Entry::Nodes(ref mut size, _) = self.children {
+ if let Size::Size(value) = *size {
+ *size = Size::table_from_size(&pool.size_pool, level, value);
+ }
+ }
+ }
+ self.push_size(pool, side, level, child.len());
+ self.push_child_node(pool, side, child);
+ PushResult::Done
+ }
+ }
+ }
+ }
+
+ pub(crate) fn pop_chunk(
+ &mut self,
+ pool: &RRBPool<A>,
+ level: usize,
+ side: Side,
+ ) -> PopResult<PoolRef<Chunk<A>>> {
+ if self.is_empty() {
+ return PopResult::Empty;
+ }
+ if level == 0 {
+ // should only get here if the tree is just one leaf node
+ match replace(&mut self.children, Empty) {
+ Values(chunk) => PopResult::Drained(chunk),
+ Empty => panic!("rrb::Node::pop_chunk: non-empty tree with Empty leaf"),
+ Nodes(_, _) => panic!("rrb::Node::pop_chunk: branch node at leaf"),
+ }
+ } else if level == 1 {
+ let child_node = self.pop_child_node(pool, side);
+ self.pop_size(pool, side, level, child_node.len());
+ let chunk = match child_node.children {
+ Values(ref chunk) => chunk.clone(),
+ Empty => panic!("rrb::Node::pop_chunk: non-empty tree with Empty leaf"),
+ Nodes(_, _) => panic!("rrb::Node::pop_chunk: branch node at leaf"),
+ };
+ if self.is_empty() {
+ PopResult::Drained(chunk)
+ } else {
+ PopResult::Done(chunk)
+ }
+ } else {
+ let index = match side {
+ Right => self.children.len() - 1,
+ Left => 0,
+ };
+ let mut drained = false;
+ let chunk = {
+ let children = self.children.unwrap_nodes_mut(pool);
+ let child = &mut children[index];
+ match child.pop_chunk(pool, level - 1, side) {
+ PopResult::Empty => return PopResult::Empty,
+ PopResult::Done(chunk) => chunk,
+ PopResult::Drained(chunk) => {
+ drained = true;
+ chunk
+ }
+ }
+ };
+ if drained {
+ self.pop_size(pool, side, level, chunk.len());
+ self.pop_child_node(pool, side);
+ if self.is_empty() {
+ PopResult::Drained(chunk)
+ } else {
+ PopResult::Done(chunk)
+ }
+ } else {
+ self.update_size(pool, index, level, -(chunk.len() as isize));
+ PopResult::Done(chunk)
+ }
+ }
+ }
+
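+    // Drop part of the node at the given index: with `drop_side == Left`
+    // everything before `index` is dropped, with `Right` everything from
+    // `index` onwards is dropped. Returns how many elements were removed,
+    // or `OutOfBounds` if the index lies outside the node.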
+ pub(crate) fn split(
+ &mut self,
+ pool: &RRBPool<A>,
+ level: usize,
+ drop_side: Side,
+ index: usize,
+ ) -> SplitResult {
+ if index == 0 && drop_side == Side::Left {
+ // Dropped nothing
+ return SplitResult::Dropped(0);
+ }
+ if level > 0 && index == 0 && drop_side == Side::Right {
+ // Dropped everything
+ let dropped = if let Entry::Nodes(ref size, _) = self.children {
+ size.size()
+ } else {
+ panic!("leaf node at non-leaf level!");
+ };
+ self.children = Entry::Empty;
+ return SplitResult::Dropped(dropped);
+ }
+ let mut dropped;
+ if level == 0 {
+ let len = self.children.len();
+ if index >= len {
+ return SplitResult::OutOfBounds;
+ }
+ let children = self.children.unwrap_values_mut(pool);
+ match drop_side {
+ Side::Left => children.drop_left(index),
+ Side::Right => children.drop_right(index),
+ }
+ SplitResult::Dropped(match drop_side {
+ Left => index,
+ Right => len - index,
+ })
+ } else if let Some(target_idx) = self.index_in(level, index) {
+ let size_up_to = self.size_up_to(level, target_idx);
+ let (size, children) =
+ if let Entry::Nodes(ref mut size, ref mut children) = self.children {
+ (size, PoolRef::make_mut(&pool.node_pool, children))
+ } else {
+ unreachable!()
+ };
+ let child_gone = 0 == {
+ let child_node = &mut children[target_idx];
+ match child_node.split(pool, level - 1, drop_side, index - size_up_to) {
+ SplitResult::OutOfBounds => return SplitResult::OutOfBounds,
+ SplitResult::Dropped(amount) => dropped = amount,
+ }
+ child_node.len()
+ };
+ match drop_side {
+ Left => {
+ let mut drop_from = target_idx;
+ if child_gone {
+ drop_from += 1;
+ }
+ children.drop_left(drop_from);
+ if let Size::Size(value) = *size {
+ *size = Size::table_from_size(&pool.size_pool, level, value);
+ }
+ let size_table = if let Size::Table(ref mut size_ref) = size {
+ PoolRef::make_mut(&pool.size_pool, size_ref)
+ } else {
+ unreachable!()
+ };
+ let dropped_size = if target_idx > 0 {
+ size_table[target_idx - 1]
+ } else {
+ 0
+ };
+ dropped += dropped_size;
+ size_table.drop_left(drop_from);
+ for i in size_table.iter_mut() {
+ *i -= dropped;
+ }
+ }
+ Right => {
+ let at_last = target_idx == children.len() - 1;
+ let mut drop_from = target_idx + 1;
+ if child_gone {
+ drop_from -= 1;
+ }
+ if drop_from < children.len() {
+ children.drop_right(drop_from);
+ }
+ match size {
+ Size::Size(ref mut size) if at_last => {
+ *size -= dropped;
+ }
+ Size::Size(ref mut size) => {
+ let size_per_child = NODE_SIZE.pow(level as u32);
+ let remainder = (target_idx + 1) * size_per_child;
+ let new_size = remainder - dropped;
+ if new_size < *size {
+ dropped = *size - new_size;
+ *size = new_size;
+ } else {
+ unreachable!(
+ "this means node is empty, should be caught at start of method"
+ );
+ }
+ }
+ Size::Table(ref mut size_ref) => {
+ let size_table = PoolRef::make_mut(&pool.size_pool, size_ref);
+ let dropped_size =
+ size_table[size_table.len() - 1] - size_table[target_idx];
+ if drop_from < size_table.len() {
+ size_table.drop_right(drop_from);
+ }
+ if !child_gone {
+ size_table[target_idx] -= dropped;
+ }
+ dropped += dropped_size;
+ }
+ }
+ }
+ }
+ SplitResult::Dropped(dropped)
+ } else {
+ SplitResult::OutOfBounds
+ }
+ }
+
+ fn merge_leaves(pool: &RRBPool<A>, mut left: Self, mut right: Self) -> Self {
+ if left.children.is_empty_node() {
+ // Left is empty, just use right
+ Self::single_parent(pool, right)
+ } else if right.children.is_empty_node() {
+ // Right is empty, just use left
+ Self::single_parent(pool, left)
+ } else {
+ {
+ let left_vals = left.children.unwrap_values_mut(pool);
+ let left_len = left_vals.len();
+ let right_vals = right.children.unwrap_values_mut(pool);
+ let right_len = right_vals.len();
+ if left_len + right_len <= NODE_SIZE {
+ left_vals.append(right_vals);
+ } else {
+ let count = right_len.min(NODE_SIZE - left_len);
+ left_vals.drain_from_front(right_vals, count);
+ }
+ }
+ if right.is_empty() {
+ Self::single_parent(pool, left)
+ } else {
+ Self::join_dense(pool, left, right)
+ }
+ }
+ }
+
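+    // Repack the children of `left`, `middle` and `right` into one node:
+    // subtrees are copied over unchanged while they remain completely dense,
+    // and from the first relaxed subtree onwards the contents are repacked
+    // into full chunks.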
+ fn merge_rebalance(
+ pool: &RRBPool<A>,
+ level: usize,
+ left: Self,
+ middle: Self,
+ right: Self,
+ ) -> Self {
+ let left_nodes = left.children.nodes().into_iter();
+ let middle_nodes = middle.children.nodes().into_iter();
+ let right_nodes = right.children.nodes().into_iter();
+ let mut subtree_still_balanced = true;
+ let mut next_leaf = Chunk::new();
+ let mut next_node = Chunk::new();
+ let mut next_subtree = Chunk::new();
+ let mut root = Chunk::new();
+
+ for subtree in left_nodes.chain(middle_nodes).chain(right_nodes) {
+ if subtree.is_empty() {
+ continue;
+ }
+ if subtree.is_completely_dense(level) && subtree_still_balanced {
+ root.push_back(subtree);
+ continue;
+ }
+ subtree_still_balanced = false;
+
+ if level == 1 {
+ for value in subtree.children.values() {
+ next_leaf.push_back(value);
+ if next_leaf.is_full() {
+ let new_node =
+ Node::from_chunk(pool, 0, PoolRef::new(&pool.value_pool, next_leaf));
+ next_subtree.push_back(new_node);
+ next_leaf = Chunk::new();
+ if next_subtree.is_full() {
+ let new_subtree = Node::parent(pool, level, next_subtree);
+ root.push_back(new_subtree);
+ next_subtree = Chunk::new();
+ }
+ }
+ }
+ } else {
+ for node in subtree.children.nodes() {
+ next_node.push_back(node);
+ if next_node.is_full() {
+ let new_node = Node::parent(pool, level - 1, next_node);
+ next_subtree.push_back(new_node);
+ next_node = Chunk::new();
+ if next_subtree.is_full() {
+ let new_subtree = Node::parent(pool, level, next_subtree);
+ root.push_back(new_subtree);
+ next_subtree = Chunk::new();
+ }
+ }
+ }
+ }
+ }
+ if !next_leaf.is_empty() {
+ let new_node = Node::from_chunk(pool, 0, PoolRef::new(&pool.value_pool, next_leaf));
+ next_subtree.push_back(new_node);
+ }
+ if !next_node.is_empty() {
+ let new_node = Node::parent(pool, level - 1, next_node);
+ next_subtree.push_back(new_node);
+ }
+ if !next_subtree.is_empty() {
+ let new_subtree = Node::parent(pool, level, next_subtree);
+ root.push_back(new_subtree);
+ }
+ Node::parent(pool, level + 1, root)
+ }
+
+ pub(crate) fn merge(pool: &RRBPool<A>, mut left: Self, mut right: Self, level: usize) -> Self {
+ if level == 0 {
+ Self::merge_leaves(pool, left, right)
+ } else {
+ let merged = {
+ if level == 1 {
+ // We're going to rebalance all the leaves anyway, there's
+ // no need for a middle at level 1
+ Node::parent(pool, 0, Chunk::new())
+ } else {
+ let left_last =
+ if let Entry::Nodes(ref mut size, ref mut children) = left.children {
+ let node = PoolRef::make_mut(&pool.node_pool, children).pop_back();
+ if !node.is_empty() {
+ size.pop(&pool.size_pool, Side::Right, level, node.len());
+ }
+ node
+ } else {
+ panic!("expected nodes, found entries or empty");
+ };
+ let right_first =
+ if let Entry::Nodes(ref mut size, ref mut children) = right.children {
+ let node = PoolRef::make_mut(&pool.node_pool, children).pop_front();
+ if !node.is_empty() {
+ size.pop(&pool.size_pool, Side::Left, level, node.len());
+ }
+ node
+ } else {
+ panic!("expected nodes, found entries or empty");
+ };
+ Self::merge(pool, left_last, right_first, level - 1)
+ }
+ };
+ Self::merge_rebalance(pool, level, left, merged, right)
+ }
+ }
+
+ #[cfg(any(test, feature = "debug"))]
+ pub(crate) fn assert_invariants(&self, level: usize) -> usize {
+ // Verifies that the size table matches reality.
+ match self.children {
+ Entry::Empty => 0,
+ Entry::Values(ref values) => {
+ // An empty value node is pointless and should never occur.
+ assert_ne!(0, values.len());
+ // Value nodes should only occur at level 0.
+ assert_eq!(0, level);
+ values.len()
+ }
+ Entry::Nodes(ref size, ref children) => {
+ // A parent node with no children should never occur.
+ assert_ne!(0, children.len());
+ // Parent nodes should never occur at level 0.
+ assert_ne!(0, level);
+ let mut lengths = Vec::new();
+ let should_be_dense = matches!(size, Size::Size(_));
+ for (index, child) in children.iter().enumerate() {
+ let len = child.assert_invariants(level - 1);
+ if should_be_dense && index < children.len() - 1 {
+ // Assert that non-end nodes without size tables are full.
+ assert_eq!(len, NODE_SIZE.pow(level as u32));
+ }
+ lengths.push(len);
+ }
+ match size {
+ Size::Size(size) => {
+ let total: usize = lengths.iter().sum();
+ assert_eq!(*size, total);
+ }
+ Size::Table(ref table) => {
+ assert_eq!(table.iter().len(), children.len());
+ for (index, current) in table.iter().enumerate() {
+ let expected: usize = lengths.iter().take(index + 1).sum();
+ assert_eq!(expected, *current);
+ }
+ }
+ }
+ lengths.iter().sum()
+ }
+ }
+ }
+
+ // pub fn print<W>(&self, f: &mut W, indent: usize, level: usize) -> Result<(), fmt::Error>
+ // where
+ // W: fmt::Write,
+ // A: fmt::Debug,
+ // {
+ // print_indent(f, indent)?;
+ // if level == 0 {
+ // if self.children.is_empty_node() {
+ // writeln!(f, "Leaf: EMPTY")
+ // } else {
+ // writeln!(f, "Leaf: {:?}", self.children.unwrap_values())
+ // }
+ // } else {
+ // match &self.children {
+ // Entry::Nodes(size, children) => {
+ // writeln!(f, "Node level {} size_table {:?}", level, size)?;
+ // for child in children.iter() {
+ // child.print(f, indent + 4, level - 1)?;
+ // }
+ // Ok(())
+ // }
+ // _ => unreachable!(),
+ // }
+ // }
+ // }
+}
+
+// fn print_indent<W>(f: &mut W, indent: usize) -> Result<(), fmt::Error>
+// where
+// W: fmt::Write,
+// {
+// for _i in 0..indent {
+// write!(f, " ")?;
+// }
+// Ok(())
+// }
diff --git a/vendor/im-rc/src/ord/map.rs b/vendor/im-rc/src/ord/map.rs
new file mode 100644
index 000000000..ad87932dc
--- /dev/null
+++ b/vendor/im-rc/src/ord/map.rs
@@ -0,0 +1,2649 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! An ordered map.
+//!
+//! An immutable ordered map implemented as a [B-tree] [1].
+//!
+//! Most operations on this type of map are O(log n). A
+//! [`HashMap`][hashmap::HashMap] is usually a better choice for
+//! performance, but the `OrdMap` has the advantage of only requiring
+//! an [`Ord`][std::cmp::Ord] constraint on the key, and of being
+//! ordered, so that keys always come out from lowest to highest,
+//! where a [`HashMap`][hashmap::HashMap] has no guaranteed ordering.
+//!
+//! [1]: https://en.wikipedia.org/wiki/B-tree
+//! [hashmap::HashMap]: ../hashmap/struct.HashMap.html
+//! [std::cmp::Ord]: https://doc.rust-lang.org/std/cmp/trait.Ord.html
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::collections;
+use std::fmt::{Debug, Error, Formatter};
+use std::hash::{BuildHasher, Hash, Hasher};
+use std::iter::{FromIterator, Iterator, Sum};
+use std::mem;
+use std::ops::{Add, Index, IndexMut, RangeBounds};
+
+use crate::hashmap::HashMap;
+use crate::nodes::btree::{BTreeValue, Insert, Node, Remove};
+#[cfg(has_specialisation)]
+use crate::util::linear_search_by;
+use crate::util::{Pool, PoolRef};
+
+pub use crate::nodes::btree::{
+ ConsumingIter, DiffItem as NodeDiffItem, DiffIter as NodeDiffIter, Iter as RangedIter,
+};
+
+/// Construct a map from a sequence of key/value pairs.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::ordmap::OrdMap;
+/// # fn main() {
+/// assert_eq!(
+/// ordmap!{
+/// 1 => 11,
+/// 2 => 22,
+/// 3 => 33
+/// },
+/// OrdMap::from(vec![(1, 11), (2, 22), (3, 33)])
+/// );
+/// # }
+/// ```
+#[macro_export]
+macro_rules! ordmap {
+ () => { $crate::ordmap::OrdMap::new() };
+
+ ( $( $key:expr => $value:expr ),* ) => {{
+ let mut map = $crate::ordmap::OrdMap::new();
+ $({
+ map.insert($key, $value);
+ })*;
+ map
+ }};
+}
+
+#[cfg(not(has_specialisation))]
+impl<K: Ord, V> BTreeValue for (K, V) {
+ type Key = K;
+
+ fn ptr_eq(&self, _other: &Self) -> bool {
+ false
+ }
+
+ fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ slice.binary_search_by(|value| Self::Key::borrow(&value.0).cmp(key))
+ }
+
+ fn search_value(slice: &[Self], key: &Self) -> Result<usize, usize> {
+ slice.binary_search_by(|value| value.0.cmp(&key.0))
+ }
+
+ fn cmp_keys<BK>(&self, other: &BK) -> Ordering
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ Self::Key::borrow(&self.0).cmp(other)
+ }
+
+ fn cmp_values(&self, other: &Self) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K: Ord, V> BTreeValue for (K, V) {
+ type Key = K;
+
+ fn ptr_eq(&self, _other: &Self) -> bool {
+ false
+ }
+
+ default fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ slice.binary_search_by(|value| Self::Key::borrow(&value.0).cmp(key))
+ }
+
+ default fn search_value(slice: &[Self], key: &Self) -> Result<usize, usize> {
+ slice.binary_search_by(|value| value.0.cmp(&key.0))
+ }
+
+ fn cmp_keys<BK>(&self, other: &BK) -> Ordering
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ Self::Key::borrow(&self.0).cmp(other)
+ }
+
+ fn cmp_values(&self, other: &Self) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K: Ord + Copy, V> BTreeValue for (K, V) {
+ fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ linear_search_by(slice, |value| Self::Key::borrow(&value.0).cmp(key))
+ }
+
+ fn search_value(slice: &[Self], key: &Self) -> Result<usize, usize> {
+ linear_search_by(slice, |value| value.0.cmp(&key.0))
+ }
+}
+
+def_pool!(OrdMapPool<K, V>, Node<(K, V)>);
+
+/// An ordered map.
+///
+/// An immutable ordered map implemented as a B-tree.
+///
+/// Most operations on this type of map are O(log n). A
+/// [`HashMap`][hashmap::HashMap] is usually a better choice for
+/// performance, but the `OrdMap` has the advantage of only requiring
+/// an [`Ord`][std::cmp::Ord] constraint on the key, and of being
+/// ordered, so that keys always come out from lowest to highest,
+/// where a [`HashMap`][hashmap::HashMap] has no guaranteed ordering.
+///
+/// [hashmap::HashMap]: ../hashmap/struct.HashMap.html
+/// [std::cmp::Ord]: https://doc.rust-lang.org/std/cmp/trait.Ord.html
+pub struct OrdMap<K, V> {
+ size: usize,
+ pool: OrdMapPool<K, V>,
+ root: PoolRef<Node<(K, V)>>,
+}
+
+impl<K, V> OrdMap<K, V> {
+ /// Construct an empty map.
+ #[must_use]
+ pub fn new() -> Self {
+ let pool = OrdMapPool::default();
+ let root = PoolRef::default(&pool.0);
+ OrdMap {
+ size: 0,
+ pool,
+ root,
+ }
+ }
+
+ /// Construct an empty map using a specific memory pool.
+ #[cfg(feature = "pool")]
+ #[must_use]
+ pub fn with_pool(pool: &OrdMapPool<K, V>) -> Self {
+ let root = PoolRef::default(&pool.0);
+ OrdMap {
+ size: 0,
+ pool: pool.clone(),
+ root,
+ }
+ }
+
+ /// Construct a map with a single mapping.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = OrdMap::unit(123, "onetwothree");
+ /// assert_eq!(
+ /// map.get(&123),
+ /// Some(&"onetwothree")
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn unit(key: K, value: V) -> Self {
+ let pool = OrdMapPool::default();
+ let root = PoolRef::new(&pool.0, Node::unit((key, value)));
+ OrdMap {
+ size: 1,
+ pool,
+ root,
+ }
+ }
+
+ /// Test whether a map is empty.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// assert!(
+ /// !ordmap!{1 => 2}.is_empty()
+ /// );
+ /// assert!(
+ /// OrdMap::<i32, i32>::new().is_empty()
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Test whether two maps refer to the same content in memory.
+ ///
+ /// This is true if the two sides are references to the same map,
+ /// or if the two maps refer to the same root node.
+ ///
+ /// This would return true if you're comparing a map to itself, or
+ /// if you're comparing a map to a fresh clone of itself.
+ ///
+ /// Time: O(1)
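+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of the shared-structure check: a clone shares its
+    /// root with the original.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map = ordmap!{1 => 1, 2 => 2};
+    /// let copy = map.clone();
+    /// assert!(map.ptr_eq(&copy));
+    /// ```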
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ std::ptr::eq(self, other) || PoolRef::ptr_eq(&self.root, &other.root)
+ }
+
+ /// Get the size of a map.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// assert_eq!(3, ordmap!{
+ /// 1 => 11,
+ /// 2 => 22,
+ /// 3 => 33
+ /// }.len());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.size
+ }
+
+ /// Get a reference to the memory pool used by this map.
+ ///
+ /// Note that if you didn't specifically construct it with a pool, you'll
+ /// get back a reference to a pool of size 0.
+ #[cfg(feature = "pool")]
+ pub fn pool(&self) -> &OrdMapPool<K, V> {
+ &self.pool
+ }
+
+ /// Discard all elements from the map.
+ ///
+ /// This leaves you with an empty map, and all elements that
+ /// were previously inside it are dropped.
+ ///
+ /// Time: O(n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdMap;
+ /// let mut map = ordmap![1=>1, 2=>2, 3=>3];
+ /// map.clear();
+ /// assert!(map.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ if !self.is_empty() {
+ self.root = PoolRef::default(&self.pool.0);
+ self.size = 0;
+ }
+ }
+}
+
+impl<K, V> OrdMap<K, V>
+where
+ K: Ord,
+{
+ /// Get the largest key in a map, along with its value. If the map
+ /// is empty, return `None`.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// assert_eq!(Some(&(3, 33)), ordmap!{
+ /// 1 => 11,
+ /// 2 => 22,
+ /// 3 => 33
+ /// }.get_max());
+ /// ```
+ #[must_use]
+ pub fn get_max(&self) -> Option<&(K, V)> {
+ self.root.max()
+ }
+
+ /// Get the smallest key in a map, along with its value. If the
+ /// map is empty, return `None`.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// assert_eq!(Some(&(1, 11)), ordmap!{
+ /// 1 => 11,
+ /// 2 => 22,
+ /// 3 => 33
+ /// }.get_min());
+ /// ```
+ #[must_use]
+ pub fn get_min(&self) -> Option<&(K, V)> {
+ self.root.min()
+ }
+
+ /// Get an iterator over the key/value pairs of a map.
+ #[must_use]
+ pub fn iter(&self) -> Iter<'_, K, V> {
+ Iter {
+ it: RangedIter::new(&self.root, self.size, ..),
+ }
+ }
+
+ /// Create an iterator over a range of key/value pairs.
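+    ///
+    /// # Examples
+    ///
+    /// A small sketch of iterating over a sub-range of keys:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map = ordmap!{1 => 11, 2 => 22, 3 => 33, 4 => 44};
+    /// let keys: Vec<i32> = map.range(2..=3).map(|(k, _)| *k).collect();
+    /// assert_eq!(vec![2, 3], keys);
+    /// ```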
+ #[must_use]
+ pub fn range<R, BK>(&self, range: R) -> Iter<'_, K, V>
+ where
+ R: RangeBounds<BK>,
+ K: Borrow<BK>,
+ BK: Ord + ?Sized,
+ {
+ Iter {
+ it: RangedIter::new(&self.root, self.size, range),
+ }
+ }
+
+ /// Get an iterator over a map's keys.
+ #[must_use]
+ pub fn keys(&self) -> Keys<'_, K, V> {
+ Keys { it: self.iter() }
+ }
+
+ /// Get an iterator over a map's values.
+ #[must_use]
+ pub fn values(&self) -> Values<'_, K, V> {
+ Values { it: self.iter() }
+ }
+
+ /// Get an iterator over the differences between this map and
+    /// another, i.e. the set of entries to add to, update in, or remove
+    /// from this map in order to make it equal to the other map.
+ ///
+ /// This function will avoid visiting nodes which are shared
+ /// between the two maps, meaning that even very large maps can be
+ /// compared quickly if most of their structure is shared.
+ ///
+ /// Time: O(n) (where n is the number of unique elements across
+ /// the two maps, minus the number of elements belonging to nodes
+ /// shared between them)
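+    ///
+    /// # Examples
+    ///
+    /// A brief illustration that counts the differences between two maps:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let old = ordmap!{1 => 1, 2 => 2};
+    /// let new = ordmap!{1 => 1, 2 => 22, 3 => 3};
+    /// // One updated entry (key 2) and one added entry (key 3).
+    /// assert_eq!(2, old.diff(&new).count());
+    /// ```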
+ #[must_use]
+ pub fn diff<'a>(&'a self, other: &'a Self) -> DiffIter<'a, K, V> {
+ DiffIter {
+ it: NodeDiffIter::new(&self.root, &other.root),
+ }
+ }
+
+ /// Get the value for a key from a map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = ordmap!{123 => "lol"};
+ /// assert_eq!(
+ /// map.get(&123),
+ /// Some(&"lol")
+ /// );
+ /// ```
+ #[must_use]
+ pub fn get<BK>(&self, key: &BK) -> Option<&V>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.root.lookup(key).map(|(_, v)| v)
+ }
+
+ /// Get the key/value pair for a key from a map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = ordmap!{123 => "lol"};
+ /// assert_eq!(
+ /// map.get_key_value(&123),
+ /// Some((&123, &"lol"))
+ /// );
+ /// ```
+ #[must_use]
+ pub fn get_key_value<BK>(&self, key: &BK) -> Option<(&K, &V)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.root.lookup(key).map(|&(ref k, ref v)| (k, v))
+ }
+
+    /// Get the closest smaller entry in a map to a given key.
+ ///
+ /// If the map contains the given key, this is returned.
+ /// Otherwise, the closest key in the map smaller than the
+    /// given key is returned. If the smallest key in the map
+ /// is larger than the given key, `None` is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdMap;
+ /// let map = ordmap![1 => 1, 3 => 3, 5 => 5];
+ /// assert_eq!(Some((&3, &3)), map.get_prev(&4));
+ /// ```
+ #[must_use]
+ pub fn get_prev<BK>(&self, key: &BK) -> Option<(&K, &V)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.root.lookup_prev(key).map(|(k, v)| (k, v))
+ }
+
+    /// Get the closest larger entry in a map to a given key.
+    ///
+    /// If the map contains the given key, this is returned.
+    /// Otherwise, the closest key in the map larger than the
+    /// given key is returned. If the largest key in the map
+    /// is smaller than the given key, `None` is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdMap;
+ /// let map = ordmap![1 => 1, 3 => 3, 5 => 5];
+ /// assert_eq!(Some((&5, &5)), map.get_next(&4));
+ /// ```
+ #[must_use]
+ pub fn get_next<BK>(&self, key: &BK) -> Option<(&K, &V)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.root.lookup_next(key).map(|(k, v)| (k, v))
+ }
+
+ /// Test for the presence of a key in a map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = ordmap!{123 => "lol"};
+ /// assert!(
+ /// map.contains_key(&123)
+ /// );
+ /// assert!(
+ /// !map.contains_key(&321)
+ /// );
+ /// ```
+ #[must_use]
+ pub fn contains_key<BK>(&self, k: &BK) -> bool
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.get(k).is_some()
+ }
+
+ /// Test whether a map is a submap of another map, meaning that
+ /// all keys in our map must also be in the other map, with the
+ /// same values.
+ ///
+ /// Use the provided function to decide whether values are equal.
+ ///
+ /// Time: O(n log n)
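+    ///
+    /// # Examples
+    ///
+    /// One possible usage, for illustration, with a custom comparison on
+    /// the values:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map1 = ordmap!{1 => 1, 2 => 2};
+    /// let map2 = ordmap!{1 => 10, 2 => 20, 3 => 30};
+    /// // Every key of map1 is in map2, and each of map1's values is smaller.
+    /// assert!(map1.is_submap_by(map2, |a, b| a < b));
+    /// ```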
+ #[must_use]
+ pub fn is_submap_by<B, RM, F>(&self, other: RM, mut cmp: F) -> bool
+ where
+ F: FnMut(&V, &B) -> bool,
+ RM: Borrow<OrdMap<K, B>>,
+ {
+ self.iter()
+ .all(|(k, v)| other.borrow().get(k).map(|ov| cmp(v, ov)).unwrap_or(false))
+ }
+
+ /// Test whether a map is a proper submap of another map, meaning
+ /// that all keys in our map must also be in the other map, with
+ /// the same values. To be a proper submap, ours must also contain
+ /// fewer keys than the other map.
+ ///
+ /// Use the provided function to decide whether values are equal.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn is_proper_submap_by<B, RM, F>(&self, other: RM, cmp: F) -> bool
+ where
+ F: FnMut(&V, &B) -> bool,
+ RM: Borrow<OrdMap<K, B>>,
+ {
+ self.len() != other.borrow().len() && self.is_submap_by(other, cmp)
+ }
+
+ /// Test whether a map is a submap of another map, meaning that
+ /// all keys in our map must also be in the other map, with the
+ /// same values.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 2 => 2};
+ /// let map2 = ordmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert!(map1.is_submap(map2));
+ /// ```
+ #[must_use]
+ pub fn is_submap<RM>(&self, other: RM) -> bool
+ where
+ V: PartialEq,
+ RM: Borrow<Self>,
+ {
+ self.is_submap_by(other.borrow(), PartialEq::eq)
+ }
+
+ /// Test whether a map is a proper submap of another map, meaning
+ /// that all keys in our map must also be in the other map, with
+ /// the same values. To be a proper submap, ours must also contain
+ /// fewer keys than the other map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 2 => 2};
+ /// let map2 = ordmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert!(map1.is_proper_submap(map2));
+ ///
+ /// let map3 = ordmap!{1 => 1, 2 => 2};
+ /// let map4 = ordmap!{1 => 1, 2 => 2};
+ /// assert!(!map3.is_proper_submap(map4));
+ /// ```
+ #[must_use]
+ pub fn is_proper_submap<RM>(&self, other: RM) -> bool
+ where
+ V: PartialEq,
+ RM: Borrow<Self>,
+ {
+ self.is_proper_submap_by(other.borrow(), PartialEq::eq)
+ }
+}
+
+impl<K, V> OrdMap<K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ /// Get a mutable reference to the value for a key from a map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let mut map = ordmap!{123 => "lol"};
+ /// if let Some(value) = map.get_mut(&123) {
+ /// *value = "omg";
+ /// }
+ /// assert_eq!(
+ /// map.get(&123),
+ /// Some(&"omg")
+ /// );
+ /// ```
+ #[must_use]
+ pub fn get_mut<BK>(&mut self, key: &BK) -> Option<&mut V>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ root.lookup_mut(&self.pool.0, key).map(|(_, v)| v)
+ }
+
+ /// Get the closest smaller entry in a map to a given key
+ /// as a mutable reference.
+ ///
+ /// If the map contains the given key, this is returned.
+ /// Otherwise, the closest key in the map smaller than the
+    /// given key is returned. If the smallest key in the map
+ /// is larger than the given key, `None` is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdMap;
+ /// let mut map = ordmap![1 => 1, 3 => 3, 5 => 5];
+ /// if let Some((key, value)) = map.get_prev_mut(&4) {
+ /// *value = 4;
+ /// }
+ /// assert_eq!(ordmap![1 => 1, 3 => 4, 5 => 5], map);
+ /// ```
+ #[must_use]
+ pub fn get_prev_mut<BK>(&mut self, key: &BK) -> Option<(&K, &mut V)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ let pool = &self.pool.0;
+ PoolRef::make_mut(pool, &mut self.root)
+ .lookup_prev_mut(pool, key)
+ .map(|(ref k, ref mut v)| (k, v))
+ }
+
+ /// Get the closest larger entry in a map to a given key
+ /// as a mutable reference.
+ ///
+    /// If the map contains the given key, this is returned.
+    /// Otherwise, the closest key in the map larger than the
+    /// given key is returned. If the largest key in the map
+    /// is smaller than the given key, `None` is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdMap;
+ /// let mut map = ordmap![1 => 1, 3 => 3, 5 => 5];
+ /// if let Some((key, value)) = map.get_next_mut(&4) {
+ /// *value = 4;
+ /// }
+ /// assert_eq!(ordmap![1 => 1, 3 => 3, 5 => 4], map);
+ /// ```
+ #[must_use]
+ pub fn get_next_mut<BK>(&mut self, key: &BK) -> Option<(&K, &mut V)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ let pool = &self.pool.0;
+ PoolRef::make_mut(pool, &mut self.root)
+ .lookup_next_mut(pool, key)
+ .map(|(ref k, ref mut v)| (k, v))
+ }
+
+ /// Insert a key/value mapping into a map.
+ ///
+ /// This is a copy-on-write operation, so that the parts of the
+ /// map's structure which are shared with other maps will be
+ /// safely copied before mutating.
+ ///
+ /// If the map already has a mapping for the given key, the
+ /// previous value is overwritten.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let mut map = ordmap!{};
+ /// map.insert(123, "123");
+ /// map.insert(456, "456");
+ /// assert_eq!(
+ /// map,
+ /// ordmap!{123 => "123", 456 => "456"}
+ /// );
+ /// ```
+ ///
+ /// [insert]: #method.insert
+ #[inline]
+ pub fn insert(&mut self, key: K, value: V) -> Option<V> {
+ let new_root = {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.insert(&self.pool.0, (key, value)) {
+ Insert::Replaced((_, old_value)) => return Some(old_value),
+ Insert::Added => {
+ self.size += 1;
+ return None;
+ }
+ Insert::Split(left, median, right) => PoolRef::new(
+ &self.pool.0,
+ Node::new_from_split(&self.pool.0, left, median, right),
+ ),
+ }
+ };
+ self.size += 1;
+ self.root = new_root;
+ None
+ }
+
+ /// Remove a key/value mapping from a map if it exists.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let mut map = ordmap!{123 => "123", 456 => "456"};
+ /// map.remove(&123);
+ /// map.remove(&456);
+ /// assert!(map.is_empty());
+ /// ```
+ ///
+ /// [remove]: #method.remove
+ #[inline]
+ pub fn remove<BK>(&mut self, k: &BK) -> Option<V>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.remove_with_key(k).map(|(_, v)| v)
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+ /// the removed key and value.
+ ///
+ /// Time: O(log n)
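+    ///
+    /// # Examples
+    ///
+    /// A small sketch of removing an entry and getting the pair back:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let mut map = ordmap!{123 => "123", 456 => "456"};
+    /// assert_eq!(Some((123, "123")), map.remove_with_key(&123));
+    /// assert_eq!(ordmap!{456 => "456"}, map);
+    /// ```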
+ pub fn remove_with_key<BK>(&mut self, k: &BK) -> Option<(K, V)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ let (new_root, removed_value) = {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.remove(&self.pool.0, k) {
+ Remove::NoChange => return None,
+ Remove::Removed(pair) => {
+ self.size -= 1;
+ return Some(pair);
+ }
+ Remove::Update(pair, root) => (PoolRef::new(&self.pool.0, root), Some(pair)),
+ }
+ };
+ self.size -= 1;
+ self.root = new_root;
+ removed_value
+ }
+
+ /// Construct a new map by inserting a key/value mapping into a
+ /// map.
+ ///
+ /// If the map already has a mapping for the given key, the
+ /// previous value is overwritten.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = ordmap!{};
+ /// assert_eq!(
+ /// map.update(123, "123"),
+ /// ordmap!{123 => "123"}
+ /// );
+ /// ```
+ #[must_use]
+ pub fn update(&self, key: K, value: V) -> Self {
+ let mut out = self.clone();
+ out.insert(key, value);
+ out
+ }
+
+ /// Construct a new map by inserting a key/value mapping into a
+ /// map.
+ ///
+ /// If the map already has a mapping for the given key, we call
+ /// the provided function with the old value and the new value,
+ /// and insert the result as the new value.
+ ///
+ /// Time: O(log n)
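+    ///
+    /// # Examples
+    ///
+    /// An illustrative sketch of combining an existing value with a new one:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map = ordmap!{1 => 10};
+    /// assert_eq!(ordmap!{1 => 15}, map.update_with(1, 5, |old, new| old + new));
+    /// ```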
+ #[must_use]
+ pub fn update_with<F>(self, k: K, v: V, f: F) -> Self
+ where
+ F: FnOnce(V, V) -> V,
+ {
+ self.update_with_key(k, v, |_, v1, v2| f(v1, v2))
+ }
+
+ /// Construct a new map by inserting a key/value mapping into a
+ /// map.
+ ///
+ /// If the map already has a mapping for the given key, we call
+ /// the provided function with the key, the old value and the new
+ /// value, and insert the result as the new value.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn update_with_key<F>(self, k: K, v: V, f: F) -> Self
+ where
+ F: FnOnce(&K, V, V) -> V,
+ {
+ match self.extract_with_key(&k) {
+ None => self.update(k, v),
+ Some((_, v2, m)) => {
+ let out_v = f(&k, v2, v);
+ m.update(k, out_v)
+ }
+ }
+ }
+
+ /// Construct a new map by inserting a key/value mapping into a
+ /// map, returning the old value for the key as well as the new
+ /// map.
+ ///
+ /// If the map already has a mapping for the given key, we call
+ /// the provided function with the key, the old value and the new
+ /// value, and insert the result as the new value.
+ ///
+ /// Time: O(log n)
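+    ///
+    /// # Examples
+    ///
+    /// A small sketch that both updates a value and returns the old one:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map = ordmap!{1 => 10};
+    /// let (old, updated) = map.update_lookup_with_key(1, 5, |_key, old, new| old + new);
+    /// assert_eq!(Some(10), old);
+    /// assert_eq!(ordmap!{1 => 15}, updated);
+    /// ```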
+ #[must_use]
+ pub fn update_lookup_with_key<F>(self, k: K, v: V, f: F) -> (Option<V>, Self)
+ where
+ F: FnOnce(&K, &V, V) -> V,
+ {
+ match self.extract_with_key(&k) {
+ None => (None, self.update(k, v)),
+ Some((_, v2, m)) => {
+ let out_v = f(&k, &v2, v);
+ (Some(v2), m.update(k, out_v))
+ }
+ }
+ }
+
+ /// Update the value for a given key by calling a function with
+ /// the current value and overwriting it with the function's
+ /// return value.
+ ///
+ /// The function gets an [`Option<V>`][std::option::Option] and
+ /// returns the same, so that it can decide to delete a mapping
+ /// instead of updating the value, and decide what to do if the
+ /// key isn't in the map.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [std::option::Option]: https://doc.rust-lang.org/std/option/enum.Option.html
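+    ///
+    /// # Examples
+    ///
+    /// A brief sketch: bump a counter if present, otherwise start it at 1.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let counters = ordmap!{"a" => 1};
+    /// let bump = |count: Option<i32>| Some(count.unwrap_or(0) + 1);
+    /// assert_eq!(ordmap!{"a" => 2}, counters.alter(bump, "a"));
+    /// assert_eq!(ordmap!{"a" => 1, "b" => 1}, counters.alter(bump, "b"));
+    /// ```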
+ #[must_use]
+ pub fn alter<F>(&self, f: F, k: K) -> Self
+ where
+ F: FnOnce(Option<V>) -> Option<V>,
+ {
+ let pop = self.extract_with_key(&k);
+ match (f(pop.as_ref().map(|&(_, ref v, _)| v.clone())), pop) {
+ (None, None) => self.clone(),
+ (Some(v), None) => self.update(k, v),
+ (None, Some((_, _, m))) => m,
+ (Some(v), Some((_, _, m))) => m.update(k, v),
+ }
+ }
+
+ /// Remove a key/value pair from a map, if it exists.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn without<BK>(&self, k: &BK) -> Self
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.extract(k)
+ .map(|(_, m)| m)
+ .unwrap_or_else(|| self.clone())
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+    /// the removed value as well as the updated map.
+ ///
+ /// Time: O(log n)
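+    ///
+    /// # Examples
+    ///
+    /// A small sketch of splitting off one entry without mutating the
+    /// original:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map = ordmap!{1 => "one", 2 => "two"};
+    /// let (value, rest) = map.extract(&1).unwrap();
+    /// assert_eq!("one", value);
+    /// assert_eq!(ordmap!{2 => "two"}, rest);
+    /// ```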
+ #[must_use]
+ pub fn extract<BK>(&self, k: &BK) -> Option<(V, Self)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ self.extract_with_key(k).map(|(_, v, m)| (v, m))
+ }
+
+ /// Remove a key/value pair from a map, if it exists, and return
+    /// the removed key and value as well as the updated map.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn extract_with_key<BK>(&self, k: &BK) -> Option<(K, V, Self)>
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ let mut out = self.clone();
+ let result = out.remove_with_key(k);
+ result.map(|(k, v)| (k, v, out))
+ }
+
+ /// Construct the union of two maps, keeping the values in the
+ /// current map when keys exist in both maps.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 3};
+ /// let map2 = ordmap!{2 => 2, 3 => 4};
+ /// let expected = ordmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert_eq!(expected, map1.union(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn union(self, other: Self) -> Self {
+ let (mut to_mutate, to_consume) = if self.len() >= other.len() {
+ (self, other)
+ } else {
+ (other, self)
+ };
+ for (k, v) in to_consume {
+ to_mutate.entry(k).or_insert(v);
+ }
+ to_mutate
+ }
+
+ /// Construct the union of two maps, using a function to decide
+ /// what to do with the value when a key is in both maps.
+ ///
+ /// The function is called when a value exists in both maps, and
+ /// receives the value from the current map as its first argument,
+ /// and the value from the other map as the second. It should
+ /// return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
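+    ///
+    /// # Examples
+    ///
+    /// An illustrative merge that adds the values of shared keys:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordmap::OrdMap;
+    /// let map1 = ordmap!{1 => 1, 3 => 4};
+    /// let map2 = ordmap!{2 => 2, 3 => 5};
+    /// assert_eq!(ordmap!{1 => 1, 2 => 2, 3 => 9}, map1.union_with(map2, |a, b| a + b));
+    /// ```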
+ #[inline]
+ #[must_use]
+ pub fn union_with<F>(self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(V, V) -> V,
+ {
+ self.union_with_key(other, |_, v1, v2| f(v1, v2))
+ }
+
+ /// Construct the union of two maps, using a function to decide
+ /// what to do with the value when a key is in both maps.
+ ///
+ /// The function is called when a value exists in both maps, and
+ /// receives a reference to the key as its first argument, the
+ /// value from the current map as the second argument, and the
+ /// value from the other map as the third argument. It should
+ /// return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 4};
+ /// let map2 = ordmap!{2 => 2, 3 => 5};
+ /// let expected = ordmap!{1 => 1, 2 => 2, 3 => 9};
+ /// assert_eq!(expected, map1.union_with_key(
+ /// map2,
+ /// |key, left, right| left + right
+ /// ));
+ /// ```
+ #[must_use]
+ pub fn union_with_key<F>(self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> V,
+ {
+ if self.len() >= other.len() {
+ self.union_with_key_inner(other, f)
+ } else {
+ other.union_with_key_inner(self, |key, other_value, self_value| {
+ f(key, self_value, other_value)
+ })
+ }
+ }
+
+ fn union_with_key_inner<F>(mut self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> V,
+ {
+ for (key, right_value) in other {
+ match self.remove(&key) {
+ None => {
+ self.insert(key, right_value);
+ }
+ Some(left_value) => {
+ let final_value = f(&key, left_value, right_value);
+ self.insert(key, final_value);
+ }
+ }
+ }
+ self
+ }
+
+ /// Construct the union of a sequence of maps, selecting the value
+ /// of the leftmost when a key appears in more than one map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 3};
+ /// let map2 = ordmap!{2 => 2};
+ /// let expected = ordmap!{1 => 1, 2 => 2, 3 => 3};
+ /// assert_eq!(expected, OrdMap::unions(vec![map1, map2]));
+ /// ```
+ #[must_use]
+ pub fn unions<I>(i: I) -> Self
+ where
+ I: IntoIterator<Item = Self>,
+ {
+ i.into_iter().fold(Self::default(), Self::union)
+ }
+
+ /// Construct the union of a sequence of maps, using a function to
+ /// decide what to do with the value when a key is in more than
+ /// one map.
+ ///
+ /// The function is called when a value exists in multiple maps,
+ /// and receives the value from the current map as its first
+ /// argument, and the value from the next map as the second. It
+ /// should return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
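+ ///
+ /// # Examples
+ ///
+ /// A hedged sketch, not from the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 2 => 2};
+ /// let map2 = ordmap!{2 => 3, 3 => 4};
+ /// let expected = ordmap!{1 => 1, 2 => 5, 3 => 4};
+ /// assert_eq!(expected, OrdMap::unions_with(vec![map1, map2], |a, b| a + b));
+ /// ```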
+ #[must_use]
+ pub fn unions_with<I, F>(i: I, f: F) -> Self
+ where
+ I: IntoIterator<Item = Self>,
+ F: Fn(V, V) -> V,
+ {
+ i.into_iter()
+ .fold(Self::default(), |a, b| a.union_with(b, &f))
+ }
+
+ /// Construct the union of a sequence of maps, using a function to
+ /// decide what to do with the value when a key is in more than
+ /// one map.
+ ///
+ /// The function is called when a value exists in multiple maps,
+ /// and receives a reference to the key as its first argument, the
+ /// value from the current map as the second argument, and the
+ /// value from the next map as the third argument. It should
+ /// return the value to be inserted in the resulting map.
+ ///
+ /// Time: O(n log n)
+ #[must_use]
+ pub fn unions_with_key<I, F>(i: I, f: F) -> Self
+ where
+ I: IntoIterator<Item = Self>,
+ F: Fn(&K, V, V) -> V,
+ {
+ i.into_iter()
+ .fold(Self::default(), |a, b| a.union_with_key(b, &f))
+ }
+
+ /// Construct the symmetric difference between two maps by discarding keys
+ /// which occur in both maps.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference`][symmetric_difference] method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 4};
+ /// let map2 = ordmap!{2 => 2, 3 => 5};
+ /// let expected = ordmap!{1 => 1, 2 => 2};
+ /// assert_eq!(expected, map1.difference(map2));
+ /// ```
+ ///
+ /// [symmetric_difference]: #method.symmetric_difference
+ #[inline]
+ #[must_use]
+ pub fn difference(self, other: Self) -> Self {
+ self.symmetric_difference(other)
+ }
+
+ /// Construct the symmetric difference between two maps by discarding keys
+ /// which occur in both maps.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 4};
+ /// let map2 = ordmap!{2 => 2, 3 => 5};
+ /// let expected = ordmap!{1 => 1, 2 => 2};
+ /// assert_eq!(expected, map1.symmetric_difference(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn symmetric_difference(self, other: Self) -> Self {
+ self.symmetric_difference_with_key(other, |_, _, _| None)
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference_with`][symmetric_difference_with] method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// [symmetric_difference_with]: #method.symmetric_difference_with
+ #[inline]
+ #[must_use]
+ pub fn difference_with<F>(self, other: Self, f: F) -> Self
+ where
+ F: FnMut(V, V) -> Option<V>,
+ {
+ self.symmetric_difference_with(other, f)
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both.
+ ///
+ /// Time: O(n log n)
+ #[inline]
+ #[must_use]
+ pub fn symmetric_difference_with<F>(self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(V, V) -> Option<V>,
+ {
+ self.symmetric_difference_with_key(other, |_, a, b| f(a, b))
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both. The function
+ /// receives the key as well as both values.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference_with_key`][symmetric_difference_with_key]
+ /// method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 4};
+ /// let map2 = ordmap!{2 => 2, 3 => 5};
+ /// let expected = ordmap!{1 => 1, 2 => 2, 3 => 9};
+ /// assert_eq!(expected, map1.difference_with_key(
+ /// map2,
+ /// |key, left, right| Some(left + right)
+ /// ));
+ /// ```
+ ///
+ /// [symmetric_difference_with_key]: #method.symmetric_difference_with_key
+ #[must_use]
+ pub fn difference_with_key<F>(self, other: Self, f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> Option<V>,
+ {
+ self.symmetric_difference_with_key(other, f)
+ }
+
+ /// Construct the symmetric difference between two maps by using a function
+ /// to decide what to do if a key occurs in both. The function
+ /// receives the key as well as both values.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 4};
+ /// let map2 = ordmap!{2 => 2, 3 => 5};
+ /// let expected = ordmap!{1 => 1, 2 => 2, 3 => 9};
+ /// assert_eq!(expected, map1.symmetric_difference_with_key(
+ /// map2,
+ /// |key, left, right| Some(left + right)
+ /// ));
+ /// ```
+ #[must_use]
+ pub fn symmetric_difference_with_key<F>(mut self, other: Self, mut f: F) -> Self
+ where
+ F: FnMut(&K, V, V) -> Option<V>,
+ {
+ let mut out = Self::default();
+ for (key, right_value) in other {
+ match self.remove(&key) {
+ None => {
+ out.insert(key, right_value);
+ }
+ Some(left_value) => {
+ if let Some(final_value) = f(&key, left_value, right_value) {
+ out.insert(key, final_value);
+ }
+ }
+ }
+ }
+ out.union(self)
+ }
+
+ /// Construct the relative complement between two maps by discarding keys
+ /// which occur in `other`.
+ ///
+ /// Time: O(m log n) where m is the size of the other map
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 3 => 4};
+ /// let map2 = ordmap!{2 => 2, 3 => 5};
+ /// let expected = ordmap!{1 => 1};
+ /// assert_eq!(expected, map1.relative_complement(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn relative_complement(mut self, other: Self) -> Self {
+ for (key, _) in other {
+ let _ = self.remove(&key);
+ }
+ self
+ }
+
+ /// Construct the intersection of two maps, keeping the values
+ /// from the current map.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 2 => 2};
+ /// let map2 = ordmap!{2 => 3, 3 => 4};
+ /// let expected = ordmap!{2 => 2};
+ /// assert_eq!(expected, map1.intersection(map2));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn intersection(self, other: Self) -> Self {
+ self.intersection_with_key(other, |_, v, _| v)
+ }
+
+ /// Construct the intersection of two maps, calling a function
+ /// with both values for each key and using the result as the
+ /// value for the key.
+ ///
+ /// Time: O(n log n)
+ #[inline]
+ #[must_use]
+ pub fn intersection_with<B, C, F>(self, other: OrdMap<K, B>, mut f: F) -> OrdMap<K, C>
+ where
+ B: Clone,
+ C: Clone,
+ F: FnMut(V, B) -> C,
+ {
+ self.intersection_with_key(other, |_, v1, v2| f(v1, v2))
+ }
+
+ /// Construct the intersection of two maps, calling a function
+ /// with the key and both values for each key and using the result
+ /// as the value for the key.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map1 = ordmap!{1 => 1, 2 => 2};
+ /// let map2 = ordmap!{2 => 3, 3 => 4};
+ /// let expected = ordmap!{2 => 5};
+ /// assert_eq!(expected, map1.intersection_with_key(
+ /// map2,
+ /// |key, left, right| left + right
+ /// ));
+ /// ```
+ #[must_use]
+ pub fn intersection_with_key<B, C, F>(mut self, other: OrdMap<K, B>, mut f: F) -> OrdMap<K, C>
+ where
+ B: Clone,
+ C: Clone,
+ F: FnMut(&K, V, B) -> C,
+ {
+ let mut out = OrdMap::<K, C>::default();
+ for (key, right_value) in other {
+ match self.remove(&key) {
+ None => (),
+ Some(left_value) => {
+ let result = f(&key, left_value, right_value);
+ out.insert(key, result);
+ }
+ }
+ }
+ out
+ }
+
+ /// Split a map into two, with the left hand map containing keys
+ /// which are smaller than `split`, and the right hand map
+ /// containing keys which are larger than `split`.
+ ///
+ /// The `split` mapping is discarded.
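+ ///
+ /// # Examples
+ ///
+ /// An illustrative sketch, not part of the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = ordmap!{1 => 1, 2 => 2, 3 => 3};
+ /// let (smaller, larger) = map.split(&2);
+ /// assert_eq!(ordmap!{1 => 1}, smaller);
+ /// assert_eq!(ordmap!{3 => 3}, larger);
+ /// ```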
+ #[must_use]
+ pub fn split<BK>(&self, split: &BK) -> (Self, Self)
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ let (l, _, r) = self.split_lookup(split);
+ (l, r)
+ }
+
+ /// Split a map into two, with the left hand map containing keys
+ /// which are smaller than `split`, and the right hand map
+ /// containing keys which are larger than `split`.
+ ///
+ /// Returns both the two maps and the value of `split`.
+ #[must_use]
+ pub fn split_lookup<BK>(&self, split: &BK) -> (Self, Option<V>, Self)
+ where
+ BK: Ord + ?Sized,
+ K: Borrow<BK>,
+ {
+ // TODO this is atrociously slow, got to be a better way
+ self.iter()
+ .fold((ordmap![], None, ordmap![]), |(l, m, r), (k, v)| {
+ match k.borrow().cmp(split) {
+ Ordering::Less => (l.update(k.clone(), v.clone()), m, r),
+ Ordering::Equal => (l, Some(v.clone()), r),
+ Ordering::Greater => (l, m, r.update(k.clone(), v.clone())),
+ }
+ })
+ }
+
+ /// Construct a map with only the `n` smallest keys from a given
+ /// map.
+ #[must_use]
+ pub fn take(&self, n: usize) -> Self {
+ self.iter()
+ .take(n)
+ .map(|(k, v)| (k.clone(), v.clone()))
+ .collect()
+ }
+
+ /// Construct a map with the `n` smallest keys removed from a
+ /// given map.
+ #[must_use]
+ pub fn skip(&self, n: usize) -> Self {
+ self.iter()
+ .skip(n)
+ .map(|(k, v)| (k.clone(), v.clone()))
+ .collect()
+ }
+
+ /// Remove the smallest key from a map, and return its value as
+ /// well as the updated map.
+ #[must_use]
+ pub fn without_min(&self) -> (Option<V>, Self) {
+ let (pop, next) = self.without_min_with_key();
+ (pop.map(|(_, v)| v), next)
+ }
+
+ /// Remove the smallest key from a map, and return that key, its
+ /// value as well as the updated map.
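+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch, not from the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let map = ordmap!{1 => 11, 2 => 22};
+ /// let (min, rest) = map.without_min_with_key();
+ /// assert_eq!(Some((1, 11)), min);
+ /// assert_eq!(ordmap!{2 => 22}, rest);
+ /// ```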
+ #[must_use]
+ pub fn without_min_with_key(&self) -> (Option<(K, V)>, Self) {
+ match self.get_min() {
+ None => (None, self.clone()),
+ Some((k, _)) => {
+ let (key, value, next) = self.extract_with_key(k).unwrap();
+ (Some((key, value)), next)
+ }
+ }
+ }
+
+ /// Remove the largest key from a map, and return its value as
+ /// well as the updated map.
+ #[must_use]
+ pub fn without_max(&self) -> (Option<V>, Self) {
+ let (pop, next) = self.without_max_with_key();
+ (pop.map(|(_, v)| v), next)
+ }
+
+ /// Remove the largest key from a map, and return that key, its
+ /// value as well as the updated map.
+ #[must_use]
+ pub fn without_max_with_key(&self) -> (Option<(K, V)>, Self) {
+ match self.get_max() {
+ None => (None, self.clone()),
+ Some((k, _)) => {
+ let (key, value, next) = self.extract_with_key(k).unwrap();
+ (Some((key, value)), next)
+ }
+ }
+ }
+
+ /// Get the [`Entry`][Entry] for a key in the map for in-place manipulation.
+ ///
+ /// Time: O(log n)
+ ///
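+ /// # Examples
+ ///
+ /// An illustrative sketch, not taken from the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordmap::OrdMap;
+ /// let mut map = ordmap!{"a" => 1};
+ /// *map.entry("a").or_insert(0) += 10;
+ /// *map.entry("b").or_insert(0) += 10;
+ /// assert_eq!(ordmap!{"a" => 11, "b" => 10}, map);
+ /// ```
+ ///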
+ /// [Entry]: enum.Entry.html
+ #[must_use]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
+ if self.contains_key(&key) {
+ Entry::Occupied(OccupiedEntry { map: self, key })
+ } else {
+ Entry::Vacant(VacantEntry { map: self, key })
+ }
+ }
+}
+
+// Entries
+
+/// A handle for a key and its associated value.
+pub enum Entry<'a, K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ /// An entry which exists in the map.
+ Occupied(OccupiedEntry<'a, K, V>),
+ /// An entry which doesn't exist in the map.
+ Vacant(VacantEntry<'a, K, V>),
+}
+
+impl<'a, K, V> Entry<'a, K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ /// Insert the default value provided if there was no value
+ /// already, and return a mutable reference to the value.
+ pub fn or_insert(self, default: V) -> &'a mut V {
+ self.or_insert_with(|| default)
+ }
+
+ /// Insert the default value from the provided function if there
+ /// was no value already, and return a mutable reference to the
+ /// value.
+ pub fn or_insert_with<F>(self, default: F) -> &'a mut V
+ where
+ F: FnOnce() -> V,
+ {
+ match self {
+ Entry::Occupied(entry) => entry.into_mut(),
+ Entry::Vacant(entry) => entry.insert(default()),
+ }
+ }
+
+ /// Insert a default value if there was no value already, and
+ /// return a mutable reference to the value.
+ pub fn or_default(self) -> &'a mut V
+ where
+ V: Default,
+ {
+ self.or_insert_with(Default::default)
+ }
+
+ /// Get the key for this entry.
+ #[must_use]
+ pub fn key(&self) -> &K {
+ match self {
+ Entry::Occupied(entry) => entry.key(),
+ Entry::Vacant(entry) => entry.key(),
+ }
+ }
+
+ /// Call the provided function to modify the value if the value
+ /// exists.
+ pub fn and_modify<F>(mut self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match &mut self {
+ Entry::Occupied(ref mut entry) => f(entry.get_mut()),
+ Entry::Vacant(_) => (),
+ }
+ self
+ }
+}
+
+/// An entry for a mapping that already exists in the map.
+pub struct OccupiedEntry<'a, K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ map: &'a mut OrdMap<K, V>,
+ key: K,
+}
+
+impl<'a, K, V> OccupiedEntry<'a, K, V>
+where
+ K: 'a + Ord + Clone,
+ V: 'a + Clone,
+{
+ /// Get the key for this entry.
+ #[must_use]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ /// Remove this entry from the map and return the removed mapping.
+ pub fn remove_entry(self) -> (K, V) {
+ self.map
+ .remove_with_key(&self.key)
+ .expect("ordmap::OccupiedEntry::remove_entry: key has vanished!")
+ }
+
+ /// Get the current value.
+ #[must_use]
+ pub fn get(&self) -> &V {
+ self.map.get(&self.key).unwrap()
+ }
+
+ /// Get a mutable reference to the current value.
+ #[must_use]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.map.get_mut(&self.key).unwrap()
+ }
+
+ /// Convert this entry into a mutable reference.
+ #[must_use]
+ pub fn into_mut(self) -> &'a mut V {
+ self.map.get_mut(&self.key).unwrap()
+ }
+
+ /// Overwrite the current value.
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Remove this entry from the map and return the removed value.
+ pub fn remove(self) -> V {
+ self.remove_entry().1
+ }
+}
+
+/// An entry for a mapping that does not already exist in the map.
+pub struct VacantEntry<'a, K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ map: &'a mut OrdMap<K, V>,
+ key: K,
+}
+
+impl<'a, K, V> VacantEntry<'a, K, V>
+where
+ K: 'a + Ord + Clone,
+ V: 'a + Clone,
+{
+ /// Get the key for this entry.
+ #[must_use]
+ pub fn key(&self) -> &K {
+ &self.key
+ }
+
+ /// Convert this entry into its key.
+ #[must_use]
+ pub fn into_key(self) -> K {
+ self.key
+ }
+
+ /// Insert a value into this entry.
+ pub fn insert(self, value: V) -> &'a mut V {
+ self.map.insert(self.key.clone(), value);
+ // TODO insert_mut ought to return this reference
+ self.map.get_mut(&self.key).unwrap()
+ }
+}
+
+// Core traits
+
+impl<K, V> Clone for OrdMap<K, V> {
+ /// Clone a map.
+ ///
+ /// Time: O(1)
+ #[inline]
+ fn clone(&self) -> Self {
+ OrdMap {
+ size: self.size,
+ pool: self.pool.clone(),
+ root: self.root.clone(),
+ }
+ }
+}
+
+#[cfg(not(has_specialisation))]
+impl<K, V> PartialEq for OrdMap<K, V>
+where
+ K: Ord + PartialEq,
+ V: PartialEq,
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.diff(other).next().is_none()
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K, V> PartialEq for OrdMap<K, V>
+where
+ K: Ord + PartialEq,
+ V: PartialEq,
+{
+ default fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.diff(other).next().is_none()
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<K, V> PartialEq for OrdMap<K, V>
+where
+ K: Ord + Eq,
+ V: Eq,
+{
+ fn eq(&self, other: &Self) -> bool {
+ PoolRef::ptr_eq(&self.root, &other.root)
+ || (self.len() == other.len() && self.diff(other).next().is_none())
+ }
+}
+
+impl<K: Ord + Eq, V: Eq> Eq for OrdMap<K, V> {}
+
+impl<K, V> PartialOrd for OrdMap<K, V>
+where
+ K: Ord,
+ V: PartialOrd,
+{
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+impl<K, V> Ord for OrdMap<K, V>
+where
+ K: Ord,
+ V: Ord,
+{
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+impl<K, V> Hash for OrdMap<K, V>
+where
+ K: Ord + Hash,
+ V: Hash,
+{
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ for i in self.iter() {
+ i.hash(state);
+ }
+ }
+}
+
+impl<K, V> Default for OrdMap<K, V> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<'a, K, V> Add for &'a OrdMap<K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ type Output = OrdMap<K, V>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.clone().union(other.clone())
+ }
+}
+
+impl<K, V> Add for OrdMap<K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ type Output = OrdMap<K, V>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.union(other)
+ }
+}
+
+impl<K, V> Sum for OrdMap<K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ fn sum<I>(it: I) -> Self
+ where
+ I: Iterator<Item = Self>,
+ {
+ it.fold(Self::default(), |a, b| a + b)
+ }
+}
+
+impl<K, V, RK, RV> Extend<(RK, RV)> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<RK>,
+ V: Clone + From<RV>,
+{
+ fn extend<I>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = (RK, RV)>,
+ {
+ for (key, value) in iter {
+ self.insert(From::from(key), From::from(value));
+ }
+ }
+}
+
+impl<'a, BK, K, V> Index<&'a BK> for OrdMap<K, V>
+where
+ BK: Ord + ?Sized,
+ K: Ord + Borrow<BK>,
+{
+ type Output = V;
+
+ fn index(&self, key: &BK) -> &Self::Output {
+ match self.root.lookup(key) {
+ None => panic!("OrdMap::index: invalid key"),
+ Some(&(_, ref value)) => value,
+ }
+ }
+}
+
+impl<'a, BK, K, V> IndexMut<&'a BK> for OrdMap<K, V>
+where
+ BK: Ord + ?Sized,
+ K: Ord + Clone + Borrow<BK>,
+ V: Clone,
+{
+ fn index_mut(&mut self, key: &BK) -> &mut Self::Output {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.lookup_mut(&self.pool.0, key) {
+ None => panic!("OrdMap::index: invalid key"),
+ Some(&mut (_, ref mut value)) => value,
+ }
+ }
+}
+
+impl<K, V> Debug for OrdMap<K, V>
+where
+ K: Ord + Debug,
+ V: Debug,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut d = f.debug_map();
+ for (k, v) in self.iter() {
+ d.entry(k, v);
+ }
+ d.finish()
+ }
+}
+
+// Iterators
+
+/// An iterator over the key/value pairs of a map.
+pub struct Iter<'a, K, V> {
+ it: RangedIter<'a, (K, V)>,
+}
+
+impl<'a, K, V> Iterator for Iter<'a, K, V>
+where
+ (K, V): 'a + BTreeValue,
+{
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|(k, v)| (k, v))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.it.remaining, Some(self.it.remaining))
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V>
+where
+ (K, V): 'a + BTreeValue,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.it.next_back().map(|(k, v)| (k, v))
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> where (K, V): 'a + BTreeValue {}
+
+/// An iterator over the differences between two maps.
+pub struct DiffIter<'a, K, V> {
+ it: NodeDiffIter<'a, (K, V)>,
+}
+
+/// A description of a difference between two ordered maps.
+#[derive(PartialEq, Eq, Debug)]
+pub enum DiffItem<'a, K, V> {
+ /// This value has been added to the new map.
+ Add(&'a K, &'a V),
+ /// This value has been changed between the two maps.
+ Update {
+ /// The old value.
+ old: (&'a K, &'a V),
+ /// The new value.
+ new: (&'a K, &'a V),
+ },
+ /// This value has been removed from the new map.
+ Remove(&'a K, &'a V),
+}
+
+impl<'a, K, V> Iterator for DiffIter<'a, K, V>
+where
+ (K, V): 'a + BTreeValue + PartialEq,
+{
+ type Item = DiffItem<'a, K, V>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|item| match item {
+ NodeDiffItem::Add((k, v)) => DiffItem::Add(k, v),
+ NodeDiffItem::Update {
+ old: (oldk, oldv),
+ new: (newk, newv),
+ } => DiffItem::Update {
+ old: (oldk, oldv),
+ new: (newk, newv),
+ },
+ NodeDiffItem::Remove((k, v)) => DiffItem::Remove(k, v),
+ })
+ }
+}
+
+/// An iterator over the keys of a map.
+pub struct Keys<'a, K, V> {
+ it: Iter<'a, K, V>,
+}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V>
+where
+ K: 'a + Ord,
+ V: 'a,
+{
+ type Item = &'a K;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|(k, _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V>
+where
+ K: 'a + Ord,
+ V: 'a,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.it.next_back().map(|(k, _)| k)
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V>
+where
+ K: 'a + Ord,
+ V: 'a,
+{
+}
+
+/// An iterator over the values of a map.
+pub struct Values<'a, K, V> {
+ it: Iter<'a, K, V>,
+}
+
+impl<'a, K, V> Iterator for Values<'a, K, V>
+where
+ K: 'a + Ord,
+ V: 'a,
+{
+ type Item = &'a V;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|(_, v)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V>
+where
+ K: 'a + Ord,
+ V: 'a,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.it.next_back().map(|(_, v)| v)
+ }
+}
+
+impl<'a, K, V> ExactSizeIterator for Values<'a, K, V>
+where
+ K: 'a + Ord,
+ V: 'a,
+{
+}
+
+impl<K, V, RK, RV> FromIterator<(RK, RV)> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<RK>,
+ V: Clone + From<RV>,
+{
+ fn from_iter<T>(i: T) -> Self
+ where
+ T: IntoIterator<Item = (RK, RV)>,
+ {
+ let mut m = OrdMap::default();
+ for (k, v) in i {
+ m.insert(From::from(k), From::from(v));
+ }
+ m
+ }
+}
+
+impl<'a, K, V> IntoIterator for &'a OrdMap<K, V>
+where
+ K: Ord,
+{
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<K, V> IntoIterator for OrdMap<K, V>
+where
+ K: Ord + Clone,
+ V: Clone,
+{
+ type Item = (K, V);
+ type IntoIter = ConsumingIter<(K, V)>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ ConsumingIter::new(&self.root, self.size)
+ }
+}
+
+// Conversions
+
+impl<K, V> AsRef<OrdMap<K, V>> for OrdMap<K, V> {
+ fn as_ref(&self) -> &Self {
+ self
+ }
+}
+
+impl<'m, 'k, 'v, K, V, OK, OV> From<&'m OrdMap<&'k K, &'v V>> for OrdMap<OK, OV>
+where
+ K: Ord + ToOwned<Owned = OK> + ?Sized,
+ V: ToOwned<Owned = OV> + ?Sized,
+ OK: Ord + Clone + Borrow<K>,
+ OV: Clone + Borrow<V>,
+{
+ fn from(m: &OrdMap<&K, &V>) -> Self {
+ m.iter()
+ .map(|(k, v)| ((*k).to_owned(), (*v).to_owned()))
+ .collect()
+ }
+}
+
+impl<'a, K, V, RK, RV, OK, OV> From<&'a [(RK, RV)]> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<OK>,
+ V: Clone + From<OV>,
+ OK: Borrow<RK>,
+ OV: Borrow<RV>,
+ RK: ToOwned<Owned = OK>,
+ RV: ToOwned<Owned = OV>,
+{
+ fn from(m: &'a [(RK, RV)]) -> OrdMap<K, V> {
+ m.iter()
+ .map(|&(ref k, ref v)| (k.to_owned(), v.to_owned()))
+ .collect()
+ }
+}
+
+impl<K, V, RK, RV> From<Vec<(RK, RV)>> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<RK>,
+ V: Clone + From<RV>,
+{
+ fn from(m: Vec<(RK, RV)>) -> OrdMap<K, V> {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K: Ord, V, RK, RV, OK, OV> From<&'a Vec<(RK, RV)>> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<OK>,
+ V: Clone + From<OV>,
+ OK: Borrow<RK>,
+ OV: Borrow<RV>,
+ RK: ToOwned<Owned = OK>,
+ RV: ToOwned<Owned = OV>,
+{
+ fn from(m: &'a Vec<(RK, RV)>) -> OrdMap<K, V> {
+ m.iter()
+ .map(|&(ref k, ref v)| (k.to_owned(), v.to_owned()))
+ .collect()
+ }
+}
+
+impl<K: Ord, V, RK: Eq + Hash, RV> From<collections::HashMap<RK, RV>> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<RK>,
+ V: Clone + From<RV>,
+{
+ fn from(m: collections::HashMap<RK, RV>) -> OrdMap<K, V> {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K, V, OK, OV, RK, RV> From<&'a collections::HashMap<RK, RV>> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<OK>,
+ V: Clone + From<OV>,
+ OK: Borrow<RK>,
+ OV: Borrow<RV>,
+ RK: Hash + Eq + ToOwned<Owned = OK>,
+ RV: ToOwned<Owned = OV>,
+{
+ fn from(m: &'a collections::HashMap<RK, RV>) -> OrdMap<K, V> {
+ m.iter()
+ .map(|(k, v)| (k.to_owned(), v.to_owned()))
+ .collect()
+ }
+}
+
+impl<K: Ord, V, RK, RV> From<collections::BTreeMap<RK, RV>> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<RK>,
+ V: Clone + From<RV>,
+{
+ fn from(m: collections::BTreeMap<RK, RV>) -> OrdMap<K, V> {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K: Ord, V, RK, RV, OK, OV> From<&'a collections::BTreeMap<RK, RV>> for OrdMap<K, V>
+where
+ K: Ord + Clone + From<OK>,
+ V: Clone + From<OV>,
+ OK: Borrow<RK>,
+ OV: Borrow<RV>,
+ RK: Ord + ToOwned<Owned = OK>,
+ RV: ToOwned<Owned = OV>,
+{
+ fn from(m: &'a collections::BTreeMap<RK, RV>) -> OrdMap<K, V> {
+ m.iter()
+ .map(|(k, v)| (k.to_owned(), v.to_owned()))
+ .collect()
+ }
+}
+
+impl<K: Ord + Hash + Eq + Clone, V: Clone, S: BuildHasher> From<HashMap<K, V, S>> for OrdMap<K, V> {
+ fn from(m: HashMap<K, V, S>) -> Self {
+ m.into_iter().collect()
+ }
+}
+
+impl<'a, K: Ord + Hash + Eq + Clone, V: Clone, S: BuildHasher> From<&'a HashMap<K, V, S>>
+ for OrdMap<K, V>
+{
+ fn from(m: &'a HashMap<K, V, S>) -> Self {
+ m.iter().map(|(k, v)| (k.clone(), v.clone())).collect()
+ }
+}
+
+// Proptest
+#[cfg(any(test, feature = "proptest"))]
+#[doc(hidden)]
+pub mod proptest {
+ #[deprecated(
+ since = "14.3.0",
+ note = "proptest strategies have moved to im::proptest"
+ )]
+ pub use crate::proptest::ord_map;
+}
+
+// Tests
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::proptest::*;
+ use crate::test::is_sorted;
+ use ::proptest::num::{i16, usize};
+ use ::proptest::{bool, collection, proptest};
+
+ #[test]
+ fn iterates_in_order() {
+ let map = ordmap! {
+ 2 => 22,
+ 1 => 11,
+ 3 => 33,
+ 8 => 88,
+ 9 => 99,
+ 4 => 44,
+ 5 => 55,
+ 7 => 77,
+ 6 => 66
+ };
+ let mut it = map.iter();
+ assert_eq!(it.next(), Some((&1, &11)));
+ assert_eq!(it.next(), Some((&2, &22)));
+ assert_eq!(it.next(), Some((&3, &33)));
+ assert_eq!(it.next(), Some((&4, &44)));
+ assert_eq!(it.next(), Some((&5, &55)));
+ assert_eq!(it.next(), Some((&6, &66)));
+ assert_eq!(it.next(), Some((&7, &77)));
+ assert_eq!(it.next(), Some((&8, &88)));
+ assert_eq!(it.next(), Some((&9, &99)));
+ assert_eq!(it.next(), None);
+ }
+
+ #[test]
+ fn into_iter() {
+ let map = ordmap! {
+ 2 => 22,
+ 1 => 11,
+ 3 => 33,
+ 8 => 88,
+ 9 => 99,
+ 4 => 44,
+ 5 => 55,
+ 7 => 77,
+ 6 => 66
+ };
+ let mut vec = vec![];
+ for (k, v) in map {
+ assert_eq!(k * 11, v);
+ vec.push(k)
+ }
+ assert_eq!(vec, vec![1, 2, 3, 4, 5, 6, 7, 8, 9]);
+ }
+
+ #[test]
+ fn deletes_correctly() {
+ let map = ordmap! {
+ 2 => 22,
+ 1 => 11,
+ 3 => 33,
+ 8 => 88,
+ 9 => 99,
+ 4 => 44,
+ 5 => 55,
+ 7 => 77,
+ 6 => 66
+ };
+ assert_eq!(map.extract(&11), None);
+ let (popped, less) = map.extract(&5).unwrap();
+ assert_eq!(popped, 55);
+ let mut it = less.iter();
+ assert_eq!(it.next(), Some((&1, &11)));
+ assert_eq!(it.next(), Some((&2, &22)));
+ assert_eq!(it.next(), Some((&3, &33)));
+ assert_eq!(it.next(), Some((&4, &44)));
+ assert_eq!(it.next(), Some((&6, &66)));
+ assert_eq!(it.next(), Some((&7, &77)));
+ assert_eq!(it.next(), Some((&8, &88)));
+ assert_eq!(it.next(), Some((&9, &99)));
+ assert_eq!(it.next(), None);
+ }
+
+ #[test]
+ fn debug_output() {
+ assert_eq!(
+ format!("{:?}", ordmap! { 3 => 4, 5 => 6, 1 => 2 }),
+ "{1: 2, 3: 4, 5: 6}"
+ );
+ }
+
+ #[test]
+ fn equality2() {
+ let v1 = "1".to_string();
+ let v2 = "1".to_string();
+ assert_eq!(v1, v2);
+ let p1 = Vec::<String>::new();
+ let p2 = Vec::<String>::new();
+ assert_eq!(p1, p2);
+ let c1 = OrdMap::unit(v1, p1);
+ let c2 = OrdMap::unit(v2, p2);
+ assert_eq!(c1, c2);
+ }
+
+ #[test]
+ fn insert_remove_single_mut() {
+ let mut m = OrdMap::new();
+ m.insert(0, 0);
+ assert_eq!(OrdMap::unit(0, 0), m);
+ m.remove(&0);
+ assert_eq!(OrdMap::new(), m);
+ }
+
+ #[test]
+ fn double_ended_iterator_1() {
+ let m = ordmap! {1 => 1, 2 => 2, 3 => 3, 4 => 4};
+ let mut it = m.iter();
+ assert_eq!(Some((&1, &1)), it.next());
+ assert_eq!(Some((&4, &4)), it.next_back());
+ assert_eq!(Some((&2, &2)), it.next());
+ assert_eq!(Some((&3, &3)), it.next_back());
+ assert_eq!(None, it.next());
+ }
+
+ #[test]
+ fn double_ended_iterator_2() {
+ let m = ordmap! {1 => 1, 2 => 2, 3 => 3, 4 => 4};
+ let mut it = m.iter();
+ assert_eq!(Some((&1, &1)), it.next());
+ assert_eq!(Some((&4, &4)), it.next_back());
+ assert_eq!(Some((&2, &2)), it.next());
+ assert_eq!(Some((&3, &3)), it.next_back());
+ assert_eq!(None, it.next_back());
+ }
+
+ #[test]
+ fn safe_mutation() {
+ let v1 = (0..131_072).map(|i| (i, i)).collect::<OrdMap<_, _>>();
+ let mut v2 = v1.clone();
+ v2.insert(131_000, 23);
+ assert_eq!(Some(&23), v2.get(&131_000));
+ assert_eq!(Some(&131_000), v1.get(&131_000));
+ }
+
+ #[test]
+ fn index_operator() {
+ let mut map = ordmap! {1 => 2, 3 => 4, 5 => 6};
+ assert_eq!(4, map[&3]);
+ map[&3] = 8;
+ assert_eq!(ordmap! {1 => 2, 3 => 8, 5 => 6}, map);
+ }
+
+ #[test]
+ fn entry_api() {
+ let mut map = ordmap! {"bar" => 5};
+ map.entry("foo").and_modify(|v| *v += 5).or_insert(1);
+ assert_eq!(1, map[&"foo"]);
+ map.entry("foo").and_modify(|v| *v += 5).or_insert(1);
+ assert_eq!(6, map[&"foo"]);
+ map.entry("bar").and_modify(|v| *v += 5).or_insert(1);
+ assert_eq!(10, map[&"bar"]);
+ assert_eq!(
+ 10,
+ match map.entry("bar") {
+ Entry::Occupied(entry) => entry.remove(),
+ _ => panic!(),
+ }
+ );
+ assert!(!map.contains_key(&"bar"));
+ }
+
+ #[test]
+ fn match_string_keys_with_string_slices() {
+ let mut map: OrdMap<String, i32> =
+ From::from(&ordmap! { "foo" => &1, "bar" => &2, "baz" => &3 });
+ assert_eq!(Some(&1), map.get("foo"));
+ map = map.without("foo");
+ assert_eq!(Some(3), map.remove("baz"));
+ map["bar"] = 8;
+ assert_eq!(8, map["bar"]);
+ }
+
+ #[test]
+ fn ranged_iter() {
+ let map: OrdMap<i32, i32> = ordmap![1=>2, 2=>3, 3=>4, 4=>5, 5=>6, 7=>8];
+ let range: Vec<(i32, i32)> = map.range(..).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (7, 8)], range);
+ let range: Vec<(i32, i32)> = map.range(..).rev().map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(7, 8), (5, 6), (4, 5), (3, 4), (2, 3), (1, 2)], range);
+ let range: Vec<(i32, i32)> = map.range(2..5).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(2, 3), (3, 4), (4, 5)], range);
+ let range: Vec<(i32, i32)> = map.range(2..5).rev().map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(4, 5), (3, 4), (2, 3)], range);
+ let range: Vec<(i32, i32)> = map.range(3..).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(3, 4), (4, 5), (5, 6), (7, 8)], range);
+ let range: Vec<(i32, i32)> = map.range(3..).rev().map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(7, 8), (5, 6), (4, 5), (3, 4)], range);
+ let range: Vec<(i32, i32)> = map.range(..4).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(1, 2), (2, 3), (3, 4)], range);
+ let range: Vec<(i32, i32)> = map.range(..4).rev().map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(3, 4), (2, 3), (1, 2)], range);
+ let range: Vec<(i32, i32)> = map.range(..=3).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(1, 2), (2, 3), (3, 4)], range);
+ let range: Vec<(i32, i32)> = map.range(..=3).rev().map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(3, 4), (2, 3), (1, 2)], range);
+ let range: Vec<(i32, i32)> = map.range(..6).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(1, 2), (2, 3), (3, 4), (4, 5), (5, 6)], range);
+ let range: Vec<(i32, i32)> = map.range(..=6).map(|(k, v)| (*k, *v)).collect();
+ assert_eq!(vec![(1, 2), (2, 3), (3, 4), (4, 5), (5, 6)], range);
+ }
+
+ #[test]
+ fn range_iter_big() {
+ use crate::nodes::btree::NODE_SIZE;
+ use std::ops::Bound::Included;
+ const N: usize = NODE_SIZE * NODE_SIZE * 5; // enough for a sizeable 3 level tree
+
+ let data = (1usize..N).filter(|i| i % 2 == 0).map(|i| (i, ()));
+ let bmap = data
+ .clone()
+ .collect::<std::collections::BTreeMap<usize, ()>>();
+ let omap = data.collect::<OrdMap<usize, ()>>();
+
+ for i in (0..NODE_SIZE * 5).chain(N - NODE_SIZE * 5..=N + 1) {
+ assert_eq!(omap.range(i..).count(), bmap.range(i..).count());
+ assert_eq!(omap.range(..i).count(), bmap.range(..i).count());
+ assert_eq!(omap.range(i..i + 7).count(), bmap.range(i..i + 7).count());
+ assert_eq!(omap.range(i..=i + 7).count(), bmap.range(i..=i + 7).count());
+ assert_eq!(
+ omap.range((Included(i), Included(i + 7))).count(),
+ bmap.range((Included(i), Included(i + 7))).count(),
+ );
+ }
+ }
+
+ #[test]
+ fn issue_124() {
+ let mut map = OrdMap::new();
+ let contents = include_str!("test-fixtures/issue_124.txt");
+ for line in contents.lines() {
+ if line.starts_with("insert ") {
+ map.insert(line[7..].parse::<u32>().unwrap(), 0);
+ } else if line.starts_with("remove ") {
+ map.remove(&line[7..].parse::<u32>().unwrap());
+ }
+ }
+ }
+
+ proptest! {
+ #[test]
+ fn length(ref input in collection::btree_map(i16::ANY, i16::ANY, 0..1000)) {
+ let map: OrdMap<i32, i32> = OrdMap::from(input.clone());
+ assert_eq!(input.len(), map.len());
+ }
+
+ #[test]
+ fn order(ref input in collection::hash_map(i16::ANY, i16::ANY, 0..1000)) {
+ let map: OrdMap<i32, i32> = OrdMap::from(input.clone());
+ assert!(is_sorted(map.keys()));
+ }
+
+ #[test]
+ fn overwrite_values(ref vec in collection::vec((i16::ANY, i16::ANY), 1..1000), index_rand in usize::ANY, new_val in i16::ANY) {
+ let index = vec[index_rand % vec.len()].0;
+ let map1 = OrdMap::from_iter(vec.clone());
+ let map2 = map1.update(index, new_val);
+ for (k, v) in map2 {
+ if k == index {
+ assert_eq!(v, new_val);
+ } else {
+ match map1.get(&k) {
+ None => panic!("map1 didn't have key {:?}", k),
+ Some(other_v) => {
+ assert_eq!(v, *other_v);
+ }
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn delete_values(ref vec in collection::vec((usize::ANY, usize::ANY), 1..1000), index_rand in usize::ANY) {
+ let index = vec[index_rand % vec.len()].0;
+ let map1: OrdMap<usize, usize> = OrdMap::from_iter(vec.clone());
+ let map2 = map1.without(&index);
+ assert_eq!(map1.len(), map2.len() + 1);
+ for k in map2.keys() {
+ assert_ne!(*k, index);
+ }
+ }
+
+ #[test]
+ fn insert_and_delete_values(
+ ref input in ord_map(0usize..64, 0usize..64, 1..1000),
+ ref ops in collection::vec((bool::ANY, usize::ANY, usize::ANY), 1..1000)
+ ) {
+ let mut map = input.clone();
+ let mut tree: collections::BTreeMap<usize, usize> = input.iter().map(|(k, v)| (*k, *v)).collect();
+ for (ins, key, val) in ops {
+ if *ins {
+ tree.insert(*key, *val);
+ map = map.update(*key, *val)
+ } else {
+ tree.remove(key);
+ map = map.without(key)
+ }
+ }
+ assert!(map.iter().map(|(k, v)| (*k, *v)).eq(tree.iter().map(|(k, v)| (*k, *v))));
+ }
+
+ #[test]
+ fn proptest_works(ref m in ord_map(0..9999, ".*", 10..100)) {
+ assert!(m.len() < 100);
+ assert!(m.len() >= 10);
+ }
+
+ #[test]
+ fn insert_and_length(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..1000)) {
+ let mut map: OrdMap<i16, i16> = OrdMap::new();
+ for (k, v) in m.iter() {
+ map = map.update(*k, *v)
+ }
+ assert_eq!(m.len(), map.len());
+ }
+
+ #[test]
+ fn from_iterator(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..1000)) {
+ let map: OrdMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ assert_eq!(m.len(), map.len());
+ }
+
+ #[test]
+ fn iterate_over(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..1000)) {
+ let map: OrdMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ assert_eq!(m.len(), map.iter().count());
+ }
+
+ #[test]
+ fn equality(ref m in collection::hash_map(i16::ANY, i16::ANY, 0..1000)) {
+ let map1: OrdMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ let map2: OrdMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ assert_eq!(map1, map2);
+ }
+
+ #[test]
+ fn lookup(ref m in ord_map(i16::ANY, i16::ANY, 0..1000)) {
+ let map: OrdMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ for (k, v) in m.iter() {
+ assert_eq!(Some(*v), map.get(k).cloned());
+ }
+ }
+
+ #[test]
+ fn remove(ref m in ord_map(i16::ANY, i16::ANY, 0..1000)) {
+ let mut map: OrdMap<i16, i16> =
+ FromIterator::from_iter(m.iter().map(|(k, v)| (*k, *v)));
+ for k in m.keys() {
+ let l = map.len();
+ assert_eq!(m.get(k).cloned(), map.get(k).cloned());
+ map = map.without(k);
+ assert_eq!(None, map.get(k));
+ assert_eq!(l - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn insert_mut(ref m in ord_map(i16::ANY, i16::ANY, 0..1000)) {
+ let mut mut_map = OrdMap::new();
+ let mut map = OrdMap::new();
+ for (k, v) in m.iter() {
+ map = map.update(*k, *v);
+ mut_map.insert(*k, *v);
+ }
+ assert_eq!(map, mut_map);
+ }
+
+ #[test]
+ fn remove_mut(ref orig in ord_map(i16::ANY, i16::ANY, 0..1000)) {
+ let mut map = orig.clone();
+ for key in orig.keys() {
+ let len = map.len();
+ assert_eq!(orig.get(key), map.get(key));
+ assert_eq!(orig.get(key).cloned(), map.remove(key));
+ assert_eq!(None, map.get(key));
+ assert_eq!(len - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn remove_alien(ref orig in collection::hash_map(i16::ANY, i16::ANY, 0..1000)) {
+ let mut map = OrdMap::<i16, i16>::from(orig.clone());
+ for key in orig.keys() {
+ let len = map.len();
+ assert_eq!(orig.get(key), map.get(key));
+ assert_eq!(orig.get(key).cloned(), map.remove(key));
+ assert_eq!(None, map.get(key));
+ assert_eq!(len - 1, map.len());
+ }
+ }
+
+ #[test]
+ fn delete_and_reinsert(
+ ref input in collection::hash_map(i16::ANY, i16::ANY, 1..1000),
+ index_rand in usize::ANY
+ ) {
+ let index = *input.keys().nth(index_rand % input.len()).unwrap();
+ let map1 = OrdMap::from_iter(input.clone());
+ let (val, map2): (i16, _) = map1.extract(&index).unwrap();
+ let map3 = map2.update(index, val);
+ for key in map2.keys() {
+ assert!(*key != index);
+ }
+ assert_eq!(map1.len(), map2.len() + 1);
+ assert_eq!(map1, map3);
+ }
+
+ #[test]
+ fn exact_size_iterator(ref m in ord_map(i16::ANY, i16::ANY, 1..1000)) {
+ let mut should_be = m.len();
+ let mut it = m.iter();
+ loop {
+ assert_eq!(should_be, it.len());
+ match it.next() {
+ None => break,
+ Some(_) => should_be -= 1,
+ }
+ }
+ assert_eq!(0, it.len());
+ }
+
+ #[test]
+ fn diff_all_values(a in collection::vec((usize::ANY, usize::ANY), 1..1000), b in collection::vec((usize::ANY, usize::ANY), 1..1000)) {
+ let a: OrdMap<usize, usize> = OrdMap::from(a);
+ let b: OrdMap<usize, usize> = OrdMap::from(b);
+
+ let diff: Vec<_> = a.diff(&b).collect();
+ let union = b.clone().union(a.clone());
+ let expected: Vec<_> = union.iter().filter_map(|(k, v)| {
+ if a.contains_key(k) {
+ if b.contains_key(k) {
+ let old = a.get(k).unwrap();
+ if old != v {
+ Some(DiffItem::Update {
+ old: (k, old),
+ new: (k, v),
+ })
+ } else {
+ None
+ }
+ } else {
+ Some(DiffItem::Remove(k, v))
+ }
+ } else {
+ Some(DiffItem::Add(k, v))
+ }
+ }).collect();
+ assert_eq!(expected, diff);
+ }
+ }
+}
diff --git a/vendor/im-rc/src/ord/mod.rs b/vendor/im-rc/src/ord/mod.rs
new file mode 100644
index 000000000..27a56a5e2
--- /dev/null
+++ b/vendor/im-rc/src/ord/mod.rs
@@ -0,0 +1,8 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#[macro_use]
+pub mod map;
+#[macro_use]
+pub mod set;
diff --git a/vendor/im-rc/src/ord/set.rs b/vendor/im-rc/src/ord/set.rs
new file mode 100644
index 000000000..60ad6adcc
--- /dev/null
+++ b/vendor/im-rc/src/ord/set.rs
@@ -0,0 +1,1243 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! An ordered set.
+//!
+//! An immutable ordered set implemented as a [B-tree] [1].
+//!
+//! Most operations on this type of set are O(log n). A
+//! [`HashSet`][hashset::HashSet] is usually a better choice for
+//! performance, but the `OrdSet` has the advantage of only requiring
+//! an [`Ord`][std::cmp::Ord] constraint on its values, and of being
+//! ordered, so values always come out from lowest to highest, where a
+//! [`HashSet`][hashset::HashSet] has no guaranteed ordering.
+//!
+//! [1]: https://en.wikipedia.org/wiki/B-tree
+//! [hashset::HashSet]: ./struct.HashSet.html
+//! [std::cmp::Ord]: https://doc.rust-lang.org/std/cmp/trait.Ord.html
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::collections;
+use std::fmt::{Debug, Error, Formatter};
+use std::hash::{BuildHasher, Hash, Hasher};
+use std::iter::{FromIterator, IntoIterator, Sum};
+use std::ops::{Add, Deref, Mul, RangeBounds};
+
+use crate::hashset::HashSet;
+use crate::nodes::btree::{
+ BTreeValue, ConsumingIter as ConsumingNodeIter, DiffIter as NodeDiffIter, Insert,
+ Iter as NodeIter, Node, Remove,
+};
+#[cfg(has_specialisation)]
+use crate::util::linear_search_by;
+use crate::util::{Pool, PoolRef};
+
+pub use crate::nodes::btree::DiffItem;
+
+/// Construct a set from a sequence of values.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::ordset::OrdSet;
+/// # fn main() {
+/// assert_eq!(
+/// ordset![1, 2, 3],
+/// OrdSet::from(vec![1, 2, 3])
+/// );
+/// # }
+/// ```
+#[macro_export]
+macro_rules! ordset {
+ () => { $crate::ordset::OrdSet::new() };
+
+ ( $($x:expr),* ) => {{
+ let mut l = $crate::ordset::OrdSet::new();
+ $(
+ l.insert($x);
+ )*
+ l
+ }};
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
+struct Value<A>(A);
+
+impl<A> Deref for Value<A> {
+ type Target = A;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+// FIXME lacking specialisation, we can't simply implement `BTreeValue`
+// for `A`, we have to use the `Value<A>` indirection.
+#[cfg(not(has_specialisation))]
+impl<A: Ord> BTreeValue for Value<A> {
+ type Key = A;
+
+ fn ptr_eq(&self, _other: &Self) -> bool {
+ false
+ }
+
+ fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ slice.binary_search_by(|value| Self::Key::borrow(value).cmp(key))
+ }
+
+ fn search_value(slice: &[Self], key: &Self) -> Result<usize, usize> {
+ slice.binary_search_by(|value| value.cmp(key))
+ }
+
+ fn cmp_keys<BK>(&self, other: &BK) -> Ordering
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ Self::Key::borrow(self).cmp(other)
+ }
+
+ fn cmp_values(&self, other: &Self) -> Ordering {
+ self.cmp(other)
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<A: Ord> BTreeValue for Value<A> {
+ type Key = A;
+
+ fn ptr_eq(&self, _other: &Self) -> bool {
+ false
+ }
+
+ default fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ slice.binary_search_by(|value| Self::Key::borrow(value).cmp(key))
+ }
+
+ default fn search_value(slice: &[Self], key: &Self) -> Result<usize, usize> {
+ slice.binary_search_by(|value| value.cmp(key))
+ }
+
+ fn cmp_keys<BK>(&self, other: &BK) -> Ordering
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ Self::Key::borrow(self).cmp(other)
+ }
+
+ fn cmp_values(&self, other: &Self) -> Ordering {
+ self.cmp(other)
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<A: Ord + Copy> BTreeValue for Value<A> {
+ fn search_key<BK>(slice: &[Self], key: &BK) -> Result<usize, usize>
+ where
+ BK: Ord + ?Sized,
+ Self::Key: Borrow<BK>,
+ {
+ linear_search_by(slice, |value| Self::Key::borrow(value).cmp(key))
+ }
+
+ fn search_value(slice: &[Self], key: &Self) -> Result<usize, usize> {
+ linear_search_by(slice, |value| value.cmp(key))
+ }
+}
+
+def_pool!(OrdSetPool<A>, Node<Value<A>>);
+
+/// An ordered set.
+///
+/// An immutable ordered set implemented as a [B-tree] [1].
+///
+/// Most operations on this type of set are O(log n). A
+/// [`HashSet`][hashset::HashSet] is usually a better choice for
+/// performance, but the `OrdSet` has the advantage of only requiring
+/// an [`Ord`][std::cmp::Ord] constraint on its values, and of being
+/// ordered, so values always come out from lowest to highest, where a
+/// [`HashSet`][hashset::HashSet] has no guaranteed ordering.
+///
+/// [1]: https://en.wikipedia.org/wiki/B-tree
+/// [hashset::HashSet]: ./struct.HashSet.html
+/// [std::cmp::Ord]: https://doc.rust-lang.org/std/cmp/trait.Ord.html
+pub struct OrdSet<A> {
+ size: usize,
+ pool: OrdSetPool<A>,
+ root: PoolRef<Node<Value<A>>>,
+}
+
+impl<A> OrdSet<A> {
+ /// Construct an empty set.
+ #[must_use]
+ pub fn new() -> Self {
+ let pool = OrdSetPool::default();
+ let root = PoolRef::default(&pool.0);
+ OrdSet {
+ size: 0,
+ pool,
+ root,
+ }
+ }
+
+ /// Construct an empty set using a specific memory pool.
+ #[cfg(feature = "pool")]
+ #[must_use]
+ pub fn with_pool(pool: &OrdSetPool<A>) -> Self {
+ let root = PoolRef::default(&pool.0);
+ OrdSet {
+ size: 0,
+ pool: pool.clone(),
+ root,
+ }
+ }
+
+ /// Construct a set with a single value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set = OrdSet::unit(123);
+ /// assert!(set.contains(&123));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn unit(a: A) -> Self {
+ let pool = OrdSetPool::default();
+ let root = PoolRef::new(&pool.0, Node::unit(Value(a)));
+ OrdSet {
+ size: 1,
+ pool,
+ root,
+ }
+ }
+
+ /// Test whether a set is empty.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// assert!(
+ /// !ordset![1, 2, 3].is_empty()
+ /// );
+ /// assert!(
+ /// OrdSet::<i32>::new().is_empty()
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Get the size of a set.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// assert_eq!(3, ordset![1, 2, 3].len());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.size
+ }
+
+ /// Test whether two sets refer to the same content in memory.
+ ///
+ /// This is true if the two sides are references to the same set,
+ /// or if the two sets refer to the same root node.
+ ///
+ /// This would return true if you're comparing a set to itself, or
+ /// if you're comparing a set to a fresh clone of itself.
+ ///
+ /// Time: O(1)
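+ ///
+ /// # Examples
+ ///
+ /// An illustrative sketch, not from the upstream docs; the second
+ /// assertion relies on separately built sets not sharing a root:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set = ordset![1, 2, 3];
+ /// let copy = set.clone();
+ /// assert!(set.ptr_eq(&copy));
+ /// let rebuilt = ordset![1, 2, 3];
+ /// assert!(!rebuilt.ptr_eq(&set));
+ /// ```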
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ std::ptr::eq(self, other) || PoolRef::ptr_eq(&self.root, &other.root)
+ }
+
+ /// Get a reference to the memory pool used by this set.
+ ///
+ /// Note that if you didn't specifically construct it with a pool, you'll
+ /// get back a reference to a pool of size 0.
+ #[cfg(feature = "pool")]
+ pub fn pool(&self) -> &OrdSetPool<A> {
+ &self.pool
+ }
+
+ /// Discard all elements from the set.
+ ///
+ /// This leaves you with an empty set, and all elements that
+ /// were previously inside it are dropped.
+ ///
+ /// Time: O(n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdSet;
+ /// let mut set = ordset![1, 2, 3];
+ /// set.clear();
+ /// assert!(set.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ if !self.is_empty() {
+ self.root = PoolRef::default(&self.pool.0);
+ self.size = 0;
+ }
+ }
+}
+
+impl<A> OrdSet<A>
+where
+ A: Ord,
+{
+ /// Get the smallest value in a set.
+ ///
+ /// If the set is empty, returns `None`.
+ ///
+ /// Time: O(log n)
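+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch, not from the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// assert_eq!(Some(&1), ordset![1, 2, 3].get_min());
+ /// assert_eq!(None, OrdSet::<i32>::new().get_min());
+ /// ```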
+ #[must_use]
+ pub fn get_min(&self) -> Option<&A> {
+ self.root.min().map(Deref::deref)
+ }
+
+ /// Get the largest value in a set.
+ ///
+ /// If the set is empty, returns `None`.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn get_max(&self) -> Option<&A> {
+ self.root.max().map(Deref::deref)
+ }
+
+ /// Create an iterator over the contents of the set.
+ #[must_use]
+ pub fn iter(&self) -> Iter<'_, A> {
+ Iter {
+ it: NodeIter::new(&self.root, self.size, ..),
+ }
+ }
+
+ /// Create an iterator over a range inside the set.
+ #[must_use]
+ pub fn range<R, BA>(&self, range: R) -> RangedIter<'_, A>
+ where
+ R: RangeBounds<BA>,
+ A: Borrow<BA>,
+ BA: Ord + ?Sized,
+ {
+ RangedIter {
+ it: NodeIter::new(&self.root, self.size, range),
+ }
+ }
+
+ /// Get an iterator over the differences between this set and
+ /// another, i.e. the set of entries to add to or remove from this
+ /// set in order to make it equal to the other set.
+ ///
+ /// This function will avoid visiting nodes which are shared
+ /// between the two sets, meaning that even very large sets can be
+ /// compared quickly if most of their structure is shared.
+ ///
+ /// Time: O(n) (where n is the number of unique elements across
+ /// the two sets, minus the number of elements belonging to nodes
+ /// shared between them)
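+ ///
+ /// # Examples
+ ///
+ /// A hedged sketch, not from the upstream docs: making `old` equal
+ /// to `new` below takes one removal and one addition, so the diff
+ /// yields two items.
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let old = ordset![1, 2];
+ /// let new = ordset![2, 3];
+ /// assert_eq!(2, old.diff(&new).count());
+ /// ```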
+ #[must_use]
+ pub fn diff<'a>(&'a self, other: &'a Self) -> DiffIter<'_, A> {
+ DiffIter {
+ it: NodeDiffIter::new(&self.root, &other.root),
+ }
+ }
+
+ /// Test if a value is part of a set.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let mut set = ordset!{1, 2, 3};
+ /// assert!(set.contains(&1));
+ /// assert!(!set.contains(&4));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn contains<BA>(&self, a: &BA) -> bool
+ where
+ BA: Ord + ?Sized,
+ A: Borrow<BA>,
+ {
+ self.root.lookup(a).is_some()
+ }
+
+ /// Get the closest smaller value in a set to a given value.
+ ///
+ /// If the set contains the given value, this is returned.
+ /// Otherwise, the closest value in the set smaller than the
+ /// given value is returned. If the smallest value in the set
+ /// is larger than the given value, `None` is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdSet;
+ /// let set = ordset![1, 3, 5, 7, 9];
+ /// assert_eq!(Some(&5), set.get_prev(&6));
+ /// ```
+ #[must_use]
+ pub fn get_prev(&self, key: &A) -> Option<&A> {
+ self.root.lookup_prev(key).map(|v| &v.0)
+ }
+
+ /// Get the closest larger value in a set to a given value.
+ ///
+ /// If the set contains the given value, this is returned.
+ /// Otherwise, the closest value in the set larger than the
+ /// given value is returned. If the largest value in the set
+ /// is smaller than the given value, `None` is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::OrdSet;
+ /// let set = ordset![1, 3, 5, 7, 9];
+ /// assert_eq!(Some(&5), set.get_next(&4));
+ /// ```
+ #[must_use]
+ pub fn get_next(&self, key: &A) -> Option<&A> {
+ self.root.lookup_next(key).map(|v| &v.0)
+ }
+
+ /// Test whether a set is a subset of another set, meaning that
+ /// all values in our set must also be in the other set.
+ ///
+ /// Time: O(n log m) where m is the size of the other set
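+ ///
+ /// # Examples
+ ///
+ /// An illustrative sketch, not part of the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let big = ordset![1, 2, 3];
+ /// assert!(ordset![1, 2].is_subset(&big));
+ /// assert!(!ordset![1, 4].is_subset(&big));
+ /// ```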
+ #[must_use]
+ pub fn is_subset<RS>(&self, other: RS) -> bool
+ where
+ RS: Borrow<Self>,
+ {
+ let other = other.borrow();
+ if other.len() < self.len() {
+ return false;
+ }
+ self.iter().all(|a| other.contains(a))
+ }
+
+ /// Test whether a set is a proper subset of another set, meaning
+ /// that all values in our set must also be in the other set. A
+ /// proper subset must also be smaller than the other set.
+ ///
+ /// Time: O(n log m) where m is the size of the other set
+ #[must_use]
+ pub fn is_proper_subset<RS>(&self, other: RS) -> bool
+ where
+ RS: Borrow<Self>,
+ {
+ self.len() != other.borrow().len() && self.is_subset(other)
+ }
+}
+
+impl<A> OrdSet<A>
+where
+ A: Ord + Clone,
+{
+ /// Insert a value into a set.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let mut set = ordset!{};
+ /// set.insert(123);
+ /// set.insert(456);
+ /// assert_eq!(
+ /// set,
+ /// ordset![123, 456]
+ /// );
+ /// ```
+ #[inline]
+ pub fn insert(&mut self, a: A) -> Option<A> {
+ let new_root = {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.insert(&self.pool.0, Value(a)) {
+ Insert::Replaced(Value(old_value)) => return Some(old_value),
+ Insert::Added => {
+ self.size += 1;
+ return None;
+ }
+ Insert::Split(left, median, right) => PoolRef::new(
+ &self.pool.0,
+ Node::new_from_split(&self.pool.0, left, median, right),
+ ),
+ }
+ };
+ self.size += 1;
+ self.root = new_root;
+ None
+ }
+
+ /// Remove a value from a set.
+ ///
+ /// Time: O(log n)
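+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch, not from the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let mut set = ordset![1, 2, 3];
+ /// assert_eq!(Some(2), set.remove(&2));
+ /// assert_eq!(None, set.remove(&2));
+ /// assert_eq!(ordset![1, 3], set);
+ /// ```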
+ #[inline]
+ pub fn remove<BA>(&mut self, a: &BA) -> Option<A>
+ where
+ BA: Ord + ?Sized,
+ A: Borrow<BA>,
+ {
+ let (new_root, removed_value) = {
+ let root = PoolRef::make_mut(&self.pool.0, &mut self.root);
+ match root.remove(&self.pool.0, a) {
+ Remove::Update(value, root) => (PoolRef::new(&self.pool.0, root), Some(value.0)),
+ Remove::Removed(value) => {
+ self.size -= 1;
+ return Some(value.0);
+ }
+ Remove::NoChange => return None,
+ }
+ };
+ self.size -= 1;
+ self.root = new_root;
+ removed_value
+ }
+
+ /// Remove the smallest value from a set.
+ ///
+ /// Time: O(log n)
+ pub fn remove_min(&mut self) -> Option<A> {
+ // FIXME implement this at the node level for better efficiency
+ let key = match self.get_min() {
+ None => return None,
+ Some(v) => v,
+ }
+ .clone();
+ self.remove(&key)
+ }
+
+ /// Remove the largest value from a set.
+ ///
+ /// Time: O(log n)
+ pub fn remove_max(&mut self) -> Option<A> {
+ // FIXME implement this at the node level for better efficiency
+ let key = match self.get_max() {
+ None => return None,
+ Some(v) => v,
+ }
+ .clone();
+ self.remove(&key)
+ }
+
+ /// Construct a new set from the current set with the given value
+ /// added.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set = ordset![456];
+ /// assert_eq!(
+ /// set.update(123),
+ /// ordset![123, 456]
+ /// );
+ /// ```
+ #[must_use]
+ pub fn update(&self, a: A) -> Self {
+ let mut out = self.clone();
+ out.insert(a);
+ out
+ }
+
+ /// Construct a new set with the given value removed if it's in
+ /// the set.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn without<BA>(&self, a: &BA) -> Self
+ where
+ BA: Ord + ?Sized,
+ A: Borrow<BA>,
+ {
+ let mut out = self.clone();
+ out.remove(a);
+ out
+ }
+
+ /// Remove the smallest value from a set, and return that value as
+ /// well as the updated set.
+ ///
+ /// Time: O(log n)
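+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch, not from the upstream docs:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set = ordset![1, 2, 3];
+ /// let (min, rest) = set.without_min();
+ /// assert_eq!(Some(1), min);
+ /// assert_eq!(ordset![2, 3], rest);
+ /// ```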
+ #[must_use]
+ pub fn without_min(&self) -> (Option<A>, Self) {
+ match self.get_min() {
+ Some(v) => (Some(v.clone()), self.without(v)),
+ None => (None, self.clone()),
+ }
+ }
+
+ /// Remove the largest value from a set, and return that value as
+ /// well as the updated set.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn without_max(&self) -> (Option<A>, Self) {
+ match self.get_max() {
+ Some(v) => (Some(v.clone()), self.without(v)),
+ None => (None, self.clone()),
+ }
+ }
+
+ /// Construct the union of two sets.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set1 = ordset!{1, 2};
+ /// let set2 = ordset!{2, 3};
+ /// let expected = ordset!{1, 2, 3};
+ /// assert_eq!(expected, set1.union(set2));
+ /// ```
+ #[must_use]
+ pub fn union(self, other: Self) -> Self {
+ let (mut to_mutate, to_consume) = if self.len() >= other.len() {
+ (self, other)
+ } else {
+ (other, self)
+ };
+ for value in to_consume {
+ to_mutate.insert(value);
+ }
+ to_mutate
+ }
+
+ /// Construct the union of multiple sets.
+ ///
+ /// Time: O(n log n)
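+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch, folding an iterator of sets into one:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordset::OrdSet;
+    /// let sets = vec![ordset![1, 2], ordset![2, 3], ordset![3, 4]];
+    /// assert_eq!(ordset![1, 2, 3, 4], OrdSet::unions(sets));
+    /// ```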
+ #[must_use]
+ pub fn unions<I>(i: I) -> Self
+ where
+ I: IntoIterator<Item = Self>,
+ {
+ i.into_iter().fold(Self::default(), Self::union)
+ }
+
+ /// Construct the symmetric difference between two sets.
+ ///
+ /// This is an alias for the
+ /// [`symmetric_difference`][symmetric_difference] method.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set1 = ordset!{1, 2};
+ /// let set2 = ordset!{2, 3};
+ /// let expected = ordset!{1, 3};
+ /// assert_eq!(expected, set1.difference(set2));
+ /// ```
+ ///
+ /// [symmetric_difference]: #method.symmetric_difference
+ #[must_use]
+ pub fn difference(self, other: Self) -> Self {
+ self.symmetric_difference(other)
+ }
+
+ /// Construct the symmetric difference between two sets.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set1 = ordset!{1, 2};
+ /// let set2 = ordset!{2, 3};
+ /// let expected = ordset!{1, 3};
+ /// assert_eq!(expected, set1.symmetric_difference(set2));
+ /// ```
+ #[must_use]
+ pub fn symmetric_difference(mut self, other: Self) -> Self {
+ for value in other {
+ if self.remove(&value).is_none() {
+ self.insert(value);
+ }
+ }
+ self
+ }
+
+ /// Construct the relative complement between two sets, that is the set
+ /// of values in `self` that do not occur in `other`.
+ ///
+ /// Time: O(m log n) where m is the size of the other set
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set1 = ordset!{1, 2};
+ /// let set2 = ordset!{2, 3};
+ /// let expected = ordset!{1};
+ /// assert_eq!(expected, set1.relative_complement(set2));
+ /// ```
+ #[must_use]
+ pub fn relative_complement(mut self, other: Self) -> Self {
+ for value in other {
+ let _ = self.remove(&value);
+ }
+ self
+ }
+
+ /// Construct the intersection of two sets.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::ordset::OrdSet;
+ /// let set1 = ordset!{1, 2};
+ /// let set2 = ordset!{2, 3};
+ /// let expected = ordset!{2};
+ /// assert_eq!(expected, set1.intersection(set2));
+ /// ```
+ #[must_use]
+ pub fn intersection(self, other: Self) -> Self {
+ let mut out = Self::default();
+ for value in other {
+ if self.contains(&value) {
+ out.insert(value);
+ }
+ }
+ out
+ }
+
+ /// Split a set into two, with the left hand set containing values
+ /// which are smaller than `split`, and the right hand set
+ /// containing values which are larger than `split`.
+ ///
+ /// The `split` value itself is discarded.
+ ///
+ /// Time: O(n)
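+    ///
+    /// # Examples
+    ///
+    /// A short sketch; note that the pivot value itself is dropped:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordset::OrdSet;
+    /// let set = ordset![1, 2, 3, 4, 5];
+    /// let (smaller, larger) = set.split(&3);
+    /// assert_eq!(ordset![1, 2], smaller);
+    /// assert_eq!(ordset![4, 5], larger);
+    /// ```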
+ #[must_use]
+ pub fn split<BA>(self, split: &BA) -> (Self, Self)
+ where
+ BA: Ord + ?Sized,
+ A: Borrow<BA>,
+ {
+ let (left, _, right) = self.split_member(split);
+ (left, right)
+ }
+
+ /// Split a set into two, with the left hand set containing values
+ /// which are smaller than `split`, and the right hand set
+ /// containing values which are larger than `split`.
+ ///
+ /// Returns a tuple of the two sets and a boolean which is true if
+ /// the `split` value existed in the original set, and false
+ /// otherwise.
+ ///
+ /// Time: O(n)
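+    ///
+    /// # Examples
+    ///
+    /// A short sketch showing the membership flag:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordset::OrdSet;
+    /// let set = ordset![1, 2, 3, 4, 5];
+    /// let (smaller, present, larger) = set.split_member(&3);
+    /// assert_eq!(ordset![1, 2], smaller);
+    /// assert!(present);
+    /// assert_eq!(ordset![4, 5], larger);
+    /// ```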
+ #[must_use]
+ pub fn split_member<BA>(self, split: &BA) -> (Self, bool, Self)
+ where
+ BA: Ord + ?Sized,
+ A: Borrow<BA>,
+ {
+ let mut left = Self::default();
+ let mut right = Self::default();
+ let mut present = false;
+ for value in self {
+ match value.borrow().cmp(split) {
+ Ordering::Less => {
+ left.insert(value);
+ }
+ Ordering::Equal => {
+ present = true;
+ }
+ Ordering::Greater => {
+ right.insert(value);
+ }
+ }
+ }
+ (left, present, right)
+ }
+
+ /// Construct a set with only the `n` smallest values from a given
+ /// set.
+ ///
+ /// Time: O(n)
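+    ///
+    /// # Examples
+    ///
+    /// A brief sketch: `take(3)` keeps only the three smallest values:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordset::OrdSet;
+    /// let set = ordset![1, 2, 3, 4, 5];
+    /// assert_eq!(ordset![1, 2, 3], set.take(3));
+    /// ```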
+ #[must_use]
+ pub fn take(&self, n: usize) -> Self {
+ self.iter().take(n).cloned().collect()
+ }
+
+ /// Construct a set with the `n` smallest values removed from a
+ /// given set.
+ ///
+ /// Time: O(n)
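+    ///
+    /// # Examples
+    ///
+    /// A brief sketch, the complement of `take`:
+    ///
+    /// ```
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::ordset::OrdSet;
+    /// let set = ordset![1, 2, 3, 4, 5];
+    /// assert_eq!(ordset![4, 5], set.skip(3));
+    /// ```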
+ #[must_use]
+ pub fn skip(&self, n: usize) -> Self {
+ self.iter().skip(n).cloned().collect()
+ }
+}
+
+// Core traits
+
+impl<A> Clone for OrdSet<A> {
+ /// Clone a set.
+ ///
+ /// Time: O(1)
+ #[inline]
+ fn clone(&self) -> Self {
+ OrdSet {
+ size: self.size,
+ pool: self.pool.clone(),
+ root: self.root.clone(),
+ }
+ }
+}
+
+impl<A: Ord> PartialEq for OrdSet<A> {
+ fn eq(&self, other: &Self) -> bool {
+ PoolRef::ptr_eq(&self.root, &other.root)
+ || (self.len() == other.len() && self.diff(other).next().is_none())
+ }
+}
+
+impl<A: Ord + Eq> Eq for OrdSet<A> {}
+
+impl<A: Ord> PartialOrd for OrdSet<A> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+impl<A: Ord> Ord for OrdSet<A> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+impl<A: Ord + Hash> Hash for OrdSet<A> {
+ fn hash<H>(&self, state: &mut H)
+ where
+ H: Hasher,
+ {
+ for i in self.iter() {
+ i.hash(state);
+ }
+ }
+}
+
+impl<A> Default for OrdSet<A> {
+ fn default() -> Self {
+ OrdSet::new()
+ }
+}
+
+impl<A: Ord + Clone> Add for OrdSet<A> {
+ type Output = OrdSet<A>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.union(other)
+ }
+}
+
+impl<'a, A: Ord + Clone> Add for &'a OrdSet<A> {
+ type Output = OrdSet<A>;
+
+ fn add(self, other: Self) -> Self::Output {
+ self.clone().union(other.clone())
+ }
+}
+
+impl<A: Ord + Clone> Mul for OrdSet<A> {
+ type Output = OrdSet<A>;
+
+ fn mul(self, other: Self) -> Self::Output {
+ self.intersection(other)
+ }
+}
+
+impl<'a, A: Ord + Clone> Mul for &'a OrdSet<A> {
+ type Output = OrdSet<A>;
+
+ fn mul(self, other: Self) -> Self::Output {
+ self.clone().intersection(other.clone())
+ }
+}
+
+impl<A: Ord + Clone> Sum for OrdSet<A> {
+ fn sum<I>(it: I) -> Self
+ where
+ I: Iterator<Item = Self>,
+ {
+ it.fold(Self::new(), |a, b| a + b)
+ }
+}
+
+impl<A, R> Extend<R> for OrdSet<A>
+where
+ A: Ord + Clone + From<R>,
+{
+ fn extend<I>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = R>,
+ {
+ for value in iter {
+ self.insert(From::from(value));
+ }
+ }
+}
+
+impl<A: Ord + Debug> Debug for OrdSet<A> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ f.debug_set().entries(self.iter()).finish()
+ }
+}
+
+// Iterators
+
+/// An iterator over the elements of a set.
+pub struct Iter<'a, A> {
+ it: NodeIter<'a, Value<A>>,
+}
+
+impl<'a, A> Iterator for Iter<'a, A>
+where
+ A: 'a + Ord,
+{
+ type Item = &'a A;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(Deref::deref)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.it.remaining, Some(self.it.remaining))
+ }
+}
+
+impl<'a, A> DoubleEndedIterator for Iter<'a, A>
+where
+ A: 'a + Ord,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.it.next_back().map(Deref::deref)
+ }
+}
+
+impl<'a, A> ExactSizeIterator for Iter<'a, A> where A: 'a + Ord {}
+
+/// A ranged iterator over the elements of a set.
+///
+/// The only difference from `Iter` is that this one doesn't implement
+/// `ExactSizeIterator` because we can't know the size of the range without first
+/// iterating over it to count.
+pub struct RangedIter<'a, A> {
+ it: NodeIter<'a, Value<A>>,
+}
+
+impl<'a, A> Iterator for RangedIter<'a, A>
+where
+ A: 'a + Ord,
+{
+ type Item = &'a A;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(Deref::deref)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'a, A> DoubleEndedIterator for RangedIter<'a, A>
+where
+ A: 'a + Ord,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.it.next_back().map(Deref::deref)
+ }
+}
+
+/// A consuming iterator over the elements of a set.
+pub struct ConsumingIter<A> {
+ it: ConsumingNodeIter<Value<A>>,
+}
+
+impl<A> Iterator for ConsumingIter<A>
+where
+ A: Ord + Clone,
+{
+ type Item = A;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|v| v.0)
+ }
+}
+
+/// An iterator over the difference between two sets.
+pub struct DiffIter<'a, A> {
+ it: NodeDiffIter<'a, Value<A>>,
+}
+
+impl<'a, A> Iterator for DiffIter<'a, A>
+where
+ A: Ord + PartialEq,
+{
+ type Item = DiffItem<'a, A>;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ self.it.next().map(|item| match item {
+ DiffItem::Add(v) => DiffItem::Add(v.deref()),
+ DiffItem::Update { old, new } => DiffItem::Update {
+ old: old.deref(),
+ new: new.deref(),
+ },
+ DiffItem::Remove(v) => DiffItem::Remove(v.deref()),
+ })
+ }
+}
+
+impl<A, R> FromIterator<R> for OrdSet<A>
+where
+ A: Ord + Clone + From<R>,
+{
+ fn from_iter<T>(i: T) -> Self
+ where
+ T: IntoIterator<Item = R>,
+ {
+ let mut out = Self::new();
+ for item in i {
+ out.insert(From::from(item));
+ }
+ out
+ }
+}
+
+impl<'a, A> IntoIterator for &'a OrdSet<A>
+where
+ A: 'a + Ord,
+{
+ type Item = &'a A;
+ type IntoIter = Iter<'a, A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<A> IntoIterator for OrdSet<A>
+where
+ A: Ord + Clone,
+{
+ type Item = A;
+ type IntoIter = ConsumingIter<A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ ConsumingIter {
+ it: ConsumingNodeIter::new(&self.root, self.size),
+ }
+ }
+}
+
+// Conversions
+
+impl<'s, 'a, A, OA> From<&'s OrdSet<&'a A>> for OrdSet<OA>
+where
+ A: ToOwned<Owned = OA> + Ord + ?Sized,
+ OA: Borrow<A> + Ord + Clone,
+{
+ fn from(set: &OrdSet<&A>) -> Self {
+ set.iter().map(|a| (*a).to_owned()).collect()
+ }
+}
+
+impl<'a, A> From<&'a [A]> for OrdSet<A>
+where
+ A: Ord + Clone,
+{
+ fn from(slice: &'a [A]) -> Self {
+ slice.iter().cloned().collect()
+ }
+}
+
+impl<A: Ord + Clone> From<Vec<A>> for OrdSet<A> {
+ fn from(vec: Vec<A>) -> Self {
+ vec.into_iter().collect()
+ }
+}
+
+impl<'a, A: Ord + Clone> From<&'a Vec<A>> for OrdSet<A> {
+ fn from(vec: &Vec<A>) -> Self {
+ vec.iter().cloned().collect()
+ }
+}
+
+impl<A: Eq + Hash + Ord + Clone> From<collections::HashSet<A>> for OrdSet<A> {
+ fn from(hash_set: collections::HashSet<A>) -> Self {
+ hash_set.into_iter().collect()
+ }
+}
+
+impl<'a, A: Eq + Hash + Ord + Clone> From<&'a collections::HashSet<A>> for OrdSet<A> {
+ fn from(hash_set: &collections::HashSet<A>) -> Self {
+ hash_set.iter().cloned().collect()
+ }
+}
+
+impl<A: Ord + Clone> From<collections::BTreeSet<A>> for OrdSet<A> {
+ fn from(btree_set: collections::BTreeSet<A>) -> Self {
+ btree_set.into_iter().collect()
+ }
+}
+
+impl<'a, A: Ord + Clone> From<&'a collections::BTreeSet<A>> for OrdSet<A> {
+ fn from(btree_set: &collections::BTreeSet<A>) -> Self {
+ btree_set.iter().cloned().collect()
+ }
+}
+
+impl<A: Hash + Eq + Ord + Clone, S: BuildHasher> From<HashSet<A, S>> for OrdSet<A> {
+ fn from(hashset: HashSet<A, S>) -> Self {
+ hashset.into_iter().collect()
+ }
+}
+
+impl<'a, A: Hash + Eq + Ord + Clone, S: BuildHasher> From<&'a HashSet<A, S>> for OrdSet<A> {
+ fn from(hashset: &HashSet<A, S>) -> Self {
+ hashset.into_iter().cloned().collect()
+ }
+}
+
+// Proptest
+#[cfg(any(test, feature = "proptest"))]
+#[doc(hidden)]
+pub mod proptest {
+ #[deprecated(
+ since = "14.3.0",
+ note = "proptest strategies have moved to im::proptest"
+ )]
+ pub use crate::proptest::ord_set;
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::proptest::*;
+ use ::proptest::proptest;
+
+ #[test]
+ fn match_strings_with_string_slices() {
+ let mut set: OrdSet<String> = From::from(&ordset!["foo", "bar"]);
+ set = set.without("bar");
+ assert!(!set.contains("bar"));
+ set.remove("foo");
+ assert!(!set.contains("foo"));
+ }
+
+ #[test]
+ fn ranged_iter() {
+ let set: OrdSet<i32> = ordset![1, 2, 3, 4, 5];
+ let range: Vec<i32> = set.range(..).cloned().collect();
+ assert_eq!(vec![1, 2, 3, 4, 5], range);
+ let range: Vec<i32> = set.range(..).rev().cloned().collect();
+ assert_eq!(vec![5, 4, 3, 2, 1], range);
+ let range: Vec<i32> = set.range(2..5).cloned().collect();
+ assert_eq!(vec![2, 3, 4], range);
+ let range: Vec<i32> = set.range(2..5).rev().cloned().collect();
+ assert_eq!(vec![4, 3, 2], range);
+ let range: Vec<i32> = set.range(3..).cloned().collect();
+ assert_eq!(vec![3, 4, 5], range);
+ let range: Vec<i32> = set.range(3..).rev().cloned().collect();
+ assert_eq!(vec![5, 4, 3], range);
+ let range: Vec<i32> = set.range(..4).cloned().collect();
+ assert_eq!(vec![1, 2, 3], range);
+ let range: Vec<i32> = set.range(..4).rev().cloned().collect();
+ assert_eq!(vec![3, 2, 1], range);
+ let range: Vec<i32> = set.range(..=3).cloned().collect();
+ assert_eq!(vec![1, 2, 3], range);
+ let range: Vec<i32> = set.range(..=3).rev().cloned().collect();
+ assert_eq!(vec![3, 2, 1], range);
+ }
+
+ proptest! {
+ #[test]
+ fn proptest_a_set(ref s in ord_set(".*", 10..100)) {
+ assert!(s.len() < 100);
+ assert!(s.len() >= 10);
+ }
+
+ #[test]
+ fn long_ranged_iter(max in 1..1000) {
+ let range = 0..max;
+ let expected: Vec<i32> = range.clone().collect();
+ let set: OrdSet<i32> = range.clone().collect::<OrdSet<_>>();
+ let result: Vec<i32> = set.range(..).cloned().collect();
+ assert_eq!(expected, result);
+
+ let expected: Vec<i32> = range.clone().rev().collect();
+ let set: OrdSet<i32> = range.collect::<OrdSet<_>>();
+ let result: Vec<i32> = set.range(..).rev().cloned().collect();
+ assert_eq!(expected, result);
+ }
+ }
+}
diff --git a/vendor/im-rc/src/ord/test-fixtures/issue_124.txt b/vendor/im-rc/src/ord/test-fixtures/issue_124.txt
new file mode 100644
index 000000000..51786177e
--- /dev/null
+++ b/vendor/im-rc/src/ord/test-fixtures/issue_124.txt
@@ -0,0 +1,3492 @@
+insert 1495
+insert 1568
+insert 1313
+insert 824
+insert 926
+insert 3031
+insert 872
+insert 1330
+insert 2356
+insert 298
+insert 1957
+insert 2133
+insert 3295
+insert 1139
+insert 2895
+insert 2442
+insert 553
+insert 2637
+insert 2571
+insert 352
+insert 1076
+insert 1611
+insert 902
+insert 480
+insert 1489
+insert 3223
+insert 169
+insert 2912
+insert 2276
+insert 1512
+insert 291
+insert 137
+insert 2917
+insert 2509
+insert 1729
+insert 62
+insert 1381
+insert 647
+insert 1647
+insert 2064
+insert 1847
+insert 1618
+insert 528
+insert 431
+insert 639
+insert 1910
+insert 1764
+insert 114
+insert 2872
+insert 2911
+insert 999
+insert 15
+insert 53
+insert 1924
+insert 2195
+insert 1134
+insert 269
+insert 2903
+insert 432
+insert 149
+insert 1241
+insert 3266
+insert 1975
+insert 2095
+insert 1384
+insert 2858
+insert 2814
+insert 2735
+insert 2779
+insert 991
+insert 1725
+insert 1804
+insert 959
+insert 1395
+insert 720
+insert 1758
+insert 1459
+insert 925
+insert 860
+insert 1035
+insert 1310
+insert 2892
+insert 3129
+insert 891
+insert 913
+insert 2136
+insert 45
+insert 255
+insert 2980
+insert 2918
+insert 2234
+insert 2845
+insert 2135
+insert 2818
+insert 978
+insert 2038
+insert 2251
+insert 14
+insert 333
+insert 649
+insert 1947
+insert 1768
+insert 3309
+insert 3063
+insert 788
+insert 65
+insert 833
+insert 1038
+insert 1966
+insert 1746
+insert 1595
+insert 2512
+insert 1543
+insert 1269
+insert 243
+insert 175
+insert 259
+insert 4
+insert 2715
+insert 297
+insert 2386
+insert 1060
+insert 2686
+insert 2400
+insert 2548
+insert 278
+insert 1890
+insert 1777
+insert 1424
+insert 2109
+insert 3307
+insert 1974
+insert 1985
+insert 3144
+insert 1186
+insert 1945
+insert 3184
+insert 1488
+insert 1707
+insert 2915
+insert 2995
+insert 2467
+insert 1791
+insert 1309
+insert 2916
+insert 1941
+insert 1824
+insert 1525
+insert 1626
+insert 1687
+insert 2333
+insert 2198
+insert 1237
+insert 2931
+insert 2764
+insert 2609
+insert 1202
+insert 1314
+insert 1556
+insert 1892
+insert 2329
+insert 2065
+insert 1559
+insert 2282
+insert 3279
+insert 1651
+insert 1610
+insert 2039
+insert 3269
+insert 1948
+insert 1663
+insert 1627
+insert 943
+insert 2313
+insert 1048
+insert 2399
+insert 2302
+insert 1022
+insert 946
+insert 2303
+insert 1085
+insert 108
+insert 2738
+insert 1311
+insert 844
+insert 1166
+insert 3198
+insert 1093
+insert 1192
+insert 1508
+insert 2471
+insert 2518
+insert 3209
+insert 1149
+insert 1743
+insert 1770
+insert 960
+insert 895
+insert 1806
+insert 1137
+insert 1654
+insert 1386
+insert 2727
+insert 1379
+insert 2572
+insert 128
+insert 2476
+insert 182
+insert 1037
+insert 605
+insert 2494
+insert 2561
+insert 941
+insert 2748
+insert 1448
+insert 2260
+insert 1273
+insert 2558
+insert 1198
+insert 1774
+insert 1740
+insert 1861
+insert 3049
+insert 2821
+insert 1341
+insert 1661
+insert 1506
+insert 1741
+insert 1811
+insert 1737
+insert 1693
+insert 1877
+insert 1756
+insert 1755
+insert 1553
+insert 1864
+insert 1734
+insert 1790
+insert 1753
+insert 2396
+insert 2531
+insert 1878
+insert 1809
+insert 2692
+insert 1854
+insert 1960
+insert 1999
+insert 2028
+insert 1879
+insert 1590
+insert 2521
+insert 3081
+insert 2665
+insert 1638
+insert 1667
+insert 1417
+insert 1876
+insert 3113
+insert 2757
+insert 2711
+insert 2587
+insert 1607
+insert 2568
+insert 2724
+insert 2685
+insert 2523
+insert 3151
+insert 1530
+insert 2454
+insert 2539
+insert 1356
+insert 1885
+insert 769
+insert 1982
+insert 3059
+insert 1232
+insert 2773
+insert 3270
+insert 2599
+insert 294
+insert 164
+insert 22
+insert 239
+insert 648
+insert 1679
+insert 644
+insert 398
+insert 455
+insert 443
+insert 1686
+insert 758
+insert 1720
+insert 1387
+insert 672
+insert 2569
+insert 2921
+insert 1228
+insert 1521
+insert 2115
+insert 2101
+insert 3035
+insert 2088
+insert 2094
+insert 1290
+insert 940
+insert 1303
+insert 2515
+insert 2863
+insert 1319
+insert 1315
+insert 2445
+insert 1949
+insert 2935
+insert 1992
+insert 1435
+insert 1413
+insert 1742
+insert 2769
+insert 2266
+insert 3290
+insert 3224
+insert 3171
+insert 2981
+insert 3200
+insert 3140
+insert 493
+insert 1299
+insert 338
+insert 1639
+insert 1463
+insert 730
+insert 1779
+insert 1918
+insert 2804
+insert 1476
+remove 432
+insert 296
+insert 1469
+insert 1364
+insert 1732
+insert 1908
+insert 2076
+insert 2489
+insert 2401
+insert 2560
+insert 2299
+insert 1451
+insert 3303
+insert 2174
+insert 2048
+insert 3293
+insert 2097
+insert 1705
+insert 1731
+insert 2296
+insert 2925
+insert 2932
+insert 1711
+insert 2397
+insert 2520
+insert 2742
+insert 1324
+insert 1160
+insert 1458
+insert 1439
+insert 3277
+insert 1374
+insert 1217
+insert 1218
+insert 2197
+insert 2185
+insert 1952
+insert 1798
+insert 1442
+insert 2601
+insert 2675
+insert 2283
+insert 12
+insert 567
+insert 638
+insert 1904
+insert 704
+insert 1447
+insert 1965
+insert 2728
+insert 2741
+insert 2441
+insert 2122
+insert 862
+insert 958
+insert 1955
+insert 850
+insert 2696
+insert 955
+insert 2867
+insert 2042
+insert 1925
+insert 2952
+insert 2265
+insert 1220
+insert 3289
+insert 2650
+insert 2215
+insert 2288
+insert 1373
+insert 2096
+insert 1167
+insert 830
+insert 829
+insert 1558
+insert 1189
+insert 2263
+insert 2341
+insert 2337
+insert 2270
+insert 3024
+insert 3182
+insert 1420
+insert 1557
+insert 1441
+insert 3037
+insert 3206
+insert 2913
+insert 1398
+insert 1392
+insert 1493
+insert 843
+insert 1287
+insert 2199
+insert 2838
+insert 2225
+insert 2181
+insert 736
+insert 2201
+insert 2907
+insert 808
+insert 712
+insert 2165
+insert 1219
+insert 1221
+insert 1715
+insert 1710
+insert 1700
+insert 2218
+insert 2187
+insert 1204
+insert 1709
+insert 2278
+insert 2698
+insert 1718
+insert 2240
+insert 2704
+insert 2340
+insert 1168
+insert 2256
+insert 2227
+insert 2203
+insert 2848
+insert 2305
+insert 1698
+insert 2647
+insert 2342
+insert 2275
+insert 1307
+insert 3062
+insert 2354
+insert 1312
+insert 3074
+insert 3060
+insert 1665
+insert 989
+insert 2591
+insert 3076
+insert 3136
+insert 3117
+insert 3123
+insert 234
+insert 6
+insert 698
+insert 719
+insert 3156
+insert 2358
+insert 2819
+insert 1793
+insert 3235
+insert 3000
+insert 28
+insert 3304
+insert 1856
+insert 1795
+insert 1733
+insert 1803
+insert 3127
+insert 1866
+insert 1642
+insert 1657
+insert 1723
+insert 2966
+insert 927
+insert 1055
+insert 3078
+insert 3201
+insert 3226
+insert 1014
+insert 3208
+insert 3262
+insert 2567
+insert 1430
+insert 2781
+insert 2425
+insert 1185
+insert 2385
+insert 1724
+insert 3181
+insert 2365
+insert 2361
+insert 1787
+insert 1410
+insert 1172
+insert 2746
+insert 2573
+insert 90
+insert 2268
+insert 2756
+insert 1304
+insert 1203
+insert 1860
+insert 841
+insert 1913
+insert 2284
+insert 2309
+insert 2946
+insert 1697
+remove 1975
+insert 2087
+insert 1921
+insert 3281
+insert 1894
+insert 1905
+insert 1206
+insert 1552
+insert 1179
+insert 2269
+insert 1606
+insert 1223
+insert 634
+insert 2594
+insert 1426
+insert 1164
+insert 1129
+insert 1019
+insert 897
+insert 2632
+insert 523
+insert 405
+insert 531
+insert 541
+insert 2285
+insert 1210
+insert 1212
+insert 2018
+insert 1524
+insert 1175
+insert 1857
+insert 1701
+insert 1712
+insert 1597
+insert 2737
+insert 363
+insert 3118
+insert 2006
+insert 2314
+insert 1205
+insert 1922
+insert 1293
+insert 1169
+insert 613
+insert 740
+insert 2678
+insert 655
+insert 385
+insert 1200
+insert 494
+insert 799
+insert 821
+insert 1282
+insert 2747
+insert 892
+insert 2656
+insert 1735
+insert 1766
+insert 2379
+insert 620
+insert 1776
+insert 1660
+insert 2999
+insert 3012
+insert 747
+insert 1797
+insert 1843
+insert 2074
+insert 1046
+insert 1194
+insert 136
+insert 551
+insert 147
+insert 41
+insert 429
+insert 198
+insert 272
+insert 459
+insert 2297
+insert 3174
+insert 1281
+insert 1274
+insert 1225
+insert 1780
+insert 1814
+insert 1726
+insert 1251
+insert 1305
+insert 1826
+insert 1859
+insert 1226
+insert 1187
+insert 2129
+insert 2853
+insert 3164
+insert 1497
+insert 2910
+insert 187
+insert 985
+insert 1414
+insert 2720
+insert 3299
+insert 2866
+insert 2850
+insert 1207
+insert 2308
+insert 879
+insert 2553
+insert 2554
+insert 1796
+insert 1802
+insert 1881
+insert 1738
+insert 1432
+insert 1317
+insert 2143
+insert 1747
+insert 2108
+insert 1406
+insert 3187
+insert 3065
+insert 3214
+insert 2364
+insert 2646
+insert 1942
+insert 25
+insert 162
+insert 469
+insert 511
+insert 370
+insert 377
+insert 3175
+insert 1609
+insert 2873
+insert 2802
+insert 3071
+insert 2623
+insert 2689
+insert 1358
+insert 2610
+insert 1903
+insert 2885
+insert 2036
+insert 2113
+insert 2784
+insert 2588
+insert 945
+insert 2744
+insert 190
+insert 2869
+insert 1505
+remove 2912
+insert 117
+insert 155
+insert 2723
+insert 1279
+insert 2801
+insert 2216
+insert 1736
+insert 688
+insert 3026
+insert 10
+insert 2771
+insert 2739
+insert 2162
+insert 546
+insert 1526
+insert 1501
+insert 1584
+insert 1541
+insert 1471
+insert 1548
+insert 508
+insert 2086
+insert 1858
+insert 2718
+insert 399
+insert 140
+insert 109
+insert 1869
+insert 560
+insert 1769
+insert 871
+insert 450
+insert 555
+insert 382
+insert 911
+insert 442
+insert 2236
+insert 2798
+insert 2069
+insert 795
+insert 931
+insert 1291
+insert 1275
+insert 576
+insert 2209
+insert 2212
+insert 2759
+insert 2248
+insert 2061
+insert 2224
+insert 2237
+insert 2525
+insert 2574
+insert 2244
+insert 2562
+insert 2196
+insert 3052
+insert 1266
+insert 1998
+insert 3020
+insert 3038
+insert 832
+insert 822
+insert 1359
+insert 2923
+insert 2063
+insert 2147
+insert 1976
+insert 1328
+insert 2976
+insert 1090
+insert 988
+insert 2761
+insert 1066
+insert 1719
+insert 126
+insert 151
+insert 1339
+insert 2158
+insert 1690
+insert 1692
+insert 2073
+insert 1829
+insert 1896
+insert 2005
+insert 2719
+insert 1376
+insert 2001
+insert 2057
+insert 2035
+insert 2924
+insert 1936
+insert 1461
+insert 2168
+insert 2245
+insert 1429
+insert 1409
+insert 2406
+insert 1318
+insert 1648
+insert 2843
+insert 3018
+insert 3011
+insert 3161
+insert 3028
+insert 2497
+insert 231
+insert 191
+insert 286
+insert 2188
+insert 2841
+insert 2228
+insert 2140
+insert 3133
+insert 1450
+insert 2103
+insert 1473
+insert 1487
+insert 3072
+insert 3093
+insert 3056
+insert 2894
+insert 1416
+insert 2897
+insert 2429
+insert 1956
+insert 1934
+insert 1940
+insert 1929
+insert 3084
+insert 2740
+insert 3298
+insert 3308
+insert 3247
+insert 2832
+insert 2163
+remove 2039
+insert 1899
+insert 1939
+insert 1399
+insert 1412
+insert 1365
+insert 2876
+insert 1897
+insert 2037
+insert 1930
+insert 1640
+insert 1931
+insert 3159
+insert 2899
+insert 2231
+insert 3147
+insert 2825
+insert 1243
+insert 1810
+insert 1872
+insert 3116
+insert 3165
+insert 2068
+insert 1874
+insert 2226
+insert 2250
+insert 2249
+insert 618
+insert 1370
+remove 1220
+insert 1354
+insert 1438
+insert 2213
+insert 1377
+insert 563
+insert 637
+insert 1933
+insert 734
+insert 761
+insert 29
+insert 2114
+insert 2194
+insert 2152
+insert 2217
+insert 314
+insert 440
+insert 393
+insert 670
+insert 785
+insert 2455
+insert 729
+insert 2271
+insert 2463
+insert 2481
+insert 2519
+insert 2488
+insert 1502
+insert 2485
+insert 2279
+insert 2638
+insert 656
+insert 574
+insert 2508
+insert 612
+insert 877
+insert 683
+insert 938
+insert 1298
+insert 1257
+insert 550
+insert 2117
+insert 2603
+insert 2702
+insert 2480
+insert 2941
+insert 2613
+insert 2281
+insert 1006
+insert 1016
+insert 2961
+insert 3033
+insert 1534
+insert 2962
+insert 2357
+insert 1216
+insert 1197
+insert 968
+insert 1002
+insert 1030
+insert 1003
+insert 1026
+insert 2708
+insert 2666
+insert 2542
+insert 2736
+insert 1870
+insert 2318
+insert 2344
+insert 3220
+insert 2635
+insert 3294
+insert 1183
+insert 2347
+insert 1396
+insert 2731
+insert 2164
+insert 2167
+insert 1411
+insert 2159
+insert 1694
+insert 1689
+insert 1714
+insert 1713
+insert 2373
+insert 2763
+insert 2820
+insert 3083
+insert 3030
+insert 3160
+insert 3010
+insert 3054
+insert 3112
+insert 3130
+insert 3095
+insert 3090
+insert 3158
+insert 3242
+insert 1691
+insert 3211
+insert 2418
+insert 1666
+insert 971
+insert 1695
+insert 1634
+insert 888
+insert 2794
+insert 2532
+insert 2887
+insert 2797
+insert 1316
+insert 2125
+insert 1696
+insert 1888
+insert 1706
+insert 1231
+insert 1234
+insert 2050
+insert 1235
+insert 2102
+insert 218
+insert 228
+insert 2461
+insert 3291
+insert 3267
+insert 1708
+insert 1721
+insert 1704
+insert 2792
+insert 2782
+insert 83
+insert 2906
+insert 1981
+insert 1245
+insert 1326
+insert 2622
+insert 2254
+insert 184
+insert 2750
+insert 2618
+insert 2679
+insert 2778
+insert 2628
+insert 855
+insert 2092
+insert 2118
+insert 1347
+insert 1527
+insert 2575
+insert 2119
+insert 2098
+insert 537
+insert 1623
+insert 2402
+insert 2054
+insert 193
+insert 1180
+insert 1378
+insert 1483
+insert 1188
+insert 2099
+insert 1184
+insert 1480
+insert 233
+insert 170
+insert 268
+insert 220
+insert 2261
+insert 1425
+insert 1932
+insert 1938
+insert 3047
+insert 1440
+insert 3105
+insert 3039
+insert 1443
+insert 1978
+insert 1996
+insert 1542
+insert 1547
+insert 1423
+insert 180
+insert 1585
+insert 1431
+insert 1586
+insert 2100
+insert 1444
+insert 2107
+insert 2896
+insert 1445
+insert 2743
+insert 1109
+insert 2751
+insert 2315
+insert 2634
+insert 1446
+insert 3288
+insert 1835
+insert 253
+insert 1504
+insert 371
+insert 504
+insert 204
+insert 2617
+insert 2388
+insert 107
+insert 131
+insert 111
+insert 1523
+insert 2909
+insert 1703
+insert 1717
+insert 1566
+insert 1151
+insert 1116
+insert 3172
+insert 2605
+insert 2641
+insert 1227
+insert 1230
+insert 1256
+insert 2725
+insert 1588
+insert 1529
+insert 1173
+insert 3142
+insert 3125
+insert 1581
+insert 578
+insert 3228
+insert 3239
+insert 599
+insert 759
+insert 3016
+insert 3025
+insert 619
+insert 653
+insert 626
+insert 2768
+insert 754
+insert 3231
+insert 3260
+insert 791
+insert 756
+insert 2405
+insert 2404
+insert 1474
+insert 1213
+insert 1211
+insert 2019
+insert 3077
+insert 3087
+insert 2555
+insert 1699
+insert 1464
+insert 2687
+insert 1578
+insert 1975
+insert 1490
+insert 1478
+insert 573
+insert 592
+insert 1655
+insert 1652
+insert 1669
+insert 1649
+insert 1092
+insert 1122
+insert 1153
+insert 2734
+insert 1676
+insert 1871
+insert 1131
+insert 2730
+insert 2729
+insert 1222
+insert 1196
+insert 2733
+insert 2793
+insert 1421
+insert 2672
+insert 1633
+insert 1472
+insert 2749
+insert 1477
+insert 1460
+insert 3050
+insert 1491
+insert 3114
+insert 2378
+insert 1427
+insert 1419
+insert 1722
+insert 1580
+insert 1449
+insert 1484
+insert 1053
+insert 1485
+insert 3085
+insert 2153
+insert 3002
+insert 2023
+insert 3008
+insert 2280
+insert 3003
+insert 2958
+insert 2972
+insert 3015
+insert 2803
+insert 3045
+insert 2127
+insert 2971
+insert 1437
+insert 1494
+insert 1486
+insert 3032
+insert 2809
+remove 1627
+insert 1428
+insert 775
+insert 5
+insert 3064
+insert 3107
+insert 3115
+insert 3138
+insert 3128
+insert 3111
+insert 1032
+insert 3075
+remove 298
+insert 3145
+insert 1572
+insert 186
+insert 1039
+insert 1086
+insert 236
+insert 1052
+insert 1385
+insert 252
+insert 1133
+insert 1108
+insert 928
+insert 1369
+insert 2116
+insert 2934
+insert 203
+insert 1113
+insert 2619
+insert 275
+insert 1844
+insert 1165
+insert 1868
+insert 194
+insert 254
+remove 297
+insert 2104
+insert 1603
+insert 2963
+insert 1672
+insert 3004
+insert 1683
+insert 1057
+insert 1807
+insert 2978
+insert 2004
+insert 1452
+insert 1482
+insert 2112
+insert 1436
+insert 1433
+insert 1602
+insert 1422
+insert 1575
+insert 1560
+insert 2649
+insert 1454
+insert 2658
+insert 2621
+insert 1702
+insert 1583
+insert 1467
+insert 1140
+insert 1229
+insert 1121
+insert 987
+insert 980
+insert 1174
+insert 1475
+insert 1615
+insert 1190
+insert 1619
+insert 1265
+insert 1621
+insert 1616
+insert 1492
+remove 1430
+insert 2905
+insert 1224
+insert 1538
+insert 1023
+insert 2807
+insert 1848
+insert 2659
+insert 1496
+insert 2710
+insert 1132
+insert 1154
+insert 1887
+insert 2593
+insert 1875
+remove 1429
+insert 1163
+insert 2983
+insert 2968
+insert 1587
+insert 222
+insert 283
+insert 263
+remove 1428
+insert 535
+insert 547
+insert 470
+insert 457
+insert 518
+insert 199
+insert 1263
+insert 1084
+insert 526
+insert 933
+insert 889
+remove 1437
+insert 246
+insert 530
+insert 903
+remove 2489
+insert 2682
+insert 2914
+insert 1716
+insert 2688
+insert 1332
+insert 2690
+insert 2722
+insert 2657
+insert 2908
+insert 1345
+insert 1582
+insert 2969
+insert 2612
+insert 213
+insert 225
+insert 1401
+insert 1340
+insert 1889
+insert 2017
+insert 961
+insert 1042
+insert 1873
+insert 2790
+insert 224
+insert 1177
+insert 202
+insert 1372
+insert 185
+insert 1520
+insert 1346
+insert 211
+insert 181
+insert 2745
+insert 2726
+insert 2870
+insert 1214
+insert 2940
+insert 2653
+insert 2219
+insert 2813
+insert 1532
+insert 2890
+insert 1195
+insert 1128
+insert 468
+insert 1592
+insert 1144
+remove 1219
+insert 3183
+insert 2774
+insert 1622
+insert 1141
+insert 3285
+insert 2274
+insert 2680
+insert 1161
+insert 1158
+insert 279
+insert 1613
+insert 142
+insert 1510
+insert 100
+insert 467
+insert 106
+insert 367
+insert 1546
+insert 1867
+insert 851
+insert 918
+insert 1845
+insert 1176
+insert 1178
+insert 1193
+insert 1884
+insert 2663
+insert 1617
+insert 1614
+insert 461
+insert 1783
+insert 2721
+insert 1851
+insert 3255
+insert 1517
+insert 237
+insert 1535
+insert 1973
+insert 2880
+insert 2883
+insert 1928
+remove 1424
+insert 2991
+insert 2202
+insert 1574
+insert 2206
+insert 538
+insert 1573
+insert 96
+insert 3
+insert 32
+insert 13
+insert 98
+insert 121
+insert 30
+insert 66
+insert 127
+insert 118
+insert 2732
+remove 1166
+insert 113
+insert 628
+insert 2246
+insert 1567
+insert 1688
+insert 1579
+insert 3014
+insert 2223
+insert 2145
+insert 2232
+insert 2190
+insert 2053
+insert 1882
+insert 381
+insert 3170
+insert 2651
+insert 3148
+insert 1805
+insert 543
+insert 406
+insert 517
+insert 454
+insert 1664
+insert 2806
+insert 1886
+insert 1865
+insert 3135
+insert 3179
+insert 2156
+insert 1628
+insert 1636
+insert 1819
+insert 2151
+insert 1968
+insert 2009
+insert 2134
+insert 3237
+insert 2886
+insert 2262
+insert 2957
+insert 2959
+insert 1972
+insert 31
+insert 842
+insert 2193
+insert 2106
+insert 2755
+insert 2645
+insert 2091
+insert 3001
+insert 2901
+insert 1678
+insert 2144
+insert 2891
+insert 1728
+insert 1645
+insert 1745
+insert 3166
+insert 1662
+insert 1366
+insert 2105
+insert 1612
+insert 2882
+insert 2528
+insert 1987
+insert 1599
+insert 229
+insert 1284
+insert 1156
+insert 1296
+insert 1297
+insert 2570
+insert 2078
+insert 2307
+insert 2565
+insert 2847
+insert 1343
+insert 1323
+insert 2900
+insert 2902
+insert 2089
+insert 1259
+insert 1288
+insert 2912
+insert 1252
+insert 256
+insert 232
+insert 230
+insert 2888
+insert 2684
+insert 2111
+insert 2090
+insert 2693
+insert 240
+insert 2898
+insert 295
+insert 2893
+insert 2264
+insert 2884
+insert 2705
+insert 2859
+insert 2904
+insert 2222
+insert 2889
+insert 389
+insert 1397
+insert 1407
+insert 1862
+insert 1250
+insert 1969
+insert 1375
+insert 1404
+insert 2433
+insert 2436
+insert 2948
+insert 1402
+insert 1320
+insert 1271
+insert 2453
+insert 2439
+insert 1300
+insert 2421
+insert 2110
+insert 2430
+insert 2121
+insert 2205
+insert 1353
+insert 509
+insert 515
+insert 1249
+insert 2919
+insert 2960
+insert 1389
+insert 139
+insert 542
+insert 2026
+insert 2039
+insert 2586
+insert 2581
+insert 2137
+insert 2936
+insert 2412
+insert 2431
+insert 2393
+insert 2362
+insert 2484
+insert 2440
+insert 2413
+insert 2468
+insert 2556
+insert 1898
+insert 1624
+insert 766
+insert 1608
+insert 1600
+insert 669
+insert 659
+insert 658
+insert 328
+insert 3221
+insert 3194
+insert 144
+insert 477
+insert 372
+insert 426
+insert 145
+insert 310
+insert 423
+insert 621
+insert 641
+insert 725
+insert 738
+insert 2093
+insert 2590
+insert 2499
+insert 1919
+insert 329
+insert 311
+insert 2580
+insert 2221
+insert 2833
+insert 2243
+insert 1564
+insert 2258
+insert 1917
+insert 2239
+insert 1901
+insert 2235
+insert 2701
+insert 2992
+insert 2752
+insert 395
+insert 2047
+insert 1967
+insert 1962
+insert 353
+insert 391
+insert 339
+insert 2211
+insert 419
+insert 409
+insert 2230
+insert 2247
+insert 2189
+insert 2229
+insert 907
+insert 957
+insert 1604
+insert 859
+insert 848
+insert 1838
+insert 883
+insert 1620
+insert 3213
+insert 456
+insert 2438
+insert 444
+insert 402
+insert 414
+insert 331
+insert 2448
+insert 400
+insert 284
+insert 421
+insert 365
+insert 280
+insert 208
+insert 2290
+insert 1596
+insert 1570
+insert 1594
+insert 1550
+remove 1490
+insert 1348
+insert 1344
+insert 1390
+insert 1511
+insert 1342
+insert 1306
+insert 1264
+insert 1286
+insert 2977
+insert 1371
+insert 1408
+insert 1544
+insert 1238
+insert 1242
+insert 2852
+insert 1605
+insert 1625
+insert 1334
+insert 1327
+insert 1576
+insert 2949
+insert 1565
+insert 3034
+insert 1589
+insert 2945
+insert 2973
+insert 2975
+insert 1637
+insert 2422
+insert 2860
+insert 2056
+insert 2045
+insert 2811
+insert 2812
+insert 2289
+insert 2041
+insert 2207
+insert 2920
+insert 1632
+insert 1394
+insert 3199
+insert 3287
+insert 3305
+insert 1368
+remove 3116
+insert 2021
+insert 3280
+insert 1964
+insert 2161
+insert 3273
+insert 1986
+insert 2022
+insert 2130
+insert 3240
+insert 3261
+insert 2040
+insert 3272
+insert 1883
+insert 1577
+insert 1555
+insert 1828
+insert 2055
+insert 2072
+insert 2257
+insert 2259
+insert 2070
+insert 2238
+insert 1591
+insert 2180
+insert 2120
+insert 2182
+insert 1360
+insert 2506
+insert 1382
+insert 2510
+insert 2220
+insert 1355
+insert 2141
+insert 1337
+insert 2437
+insert 2469
+insert 2443
+insert 1863
+insert 2166
+insert 2585
+insert 2139
+insert 2522
+insert 2331
+insert 2503
+insert 2210
+insert 2175
+insert 11
+insert 1515
+insert 119
+insert 1519
+insert 1593
+insert 201
+insert 258
+insert 1601
+insert 205
+insert 2176
+insert 1403
+insert 1598
+insert 1522
+insert 1514
+insert 1466
+insert 3274
+insert 1561
+insert 1393
+insert 2157
+insert 1503
+insert 910
+insert 924
+insert 861
+insert 388
+insert 3205
+insert 407
+insert 932
+insert 195
+insert 942
+insert 241
+insert 922
+insert 2024
+insert 864
+insert 1627
+insert 271
+insert 265
+insert 2376
+insert 247
+insert 2077
+insert 2277
+insert 3202
+insert 2267
+insert 2464
+insert 2446
+insert 1367
+insert 1380
+insert 2301
+insert 1405
+insert 2348
+insert 418
+insert 396
+insert 413
+insert 1363
+insert 1294
+insert 3229
+insert 3217
+remove 1411
+insert 2540
+insert 2633
+insert 1388
+insert 2577
+insert 2557
+insert 1272
+insert 2502
+insert 2516
+insert 2564
+insert 2559
+insert 2434
+insert 1953
+insert 2664
+insert 1837
+insert 2677
+insert 1827
+insert 2625
+insert 2667
+insert 2369
+insert 2360
+insert 1880
+insert 2387
+insert 335
+insert 2416
+insert 2643
+remove 1312
+insert 2410
+insert 2420
+insert 2411
+insert 2368
+insert 2374
+insert 2375
+insert 2417
+insert 2383
+insert 2395
+insert 1350
+insert 1260
+insert 1244
+insert 2123
+insert 1509
+insert 2584
+insert 2582
+insert 2578
+insert 1911
+insert 2479
+insert 2566
+insert 2346
+insert 2292
+insert 691
+insert 1277
+insert 882
+insert 846
+insert 2287
+insert 849
+insert 866
+insert 55
+insert 2526
+insert 869
+insert 827
+insert 1171
+insert 1533
+insert 350
+insert 46
+insert 135
+insert 2615
+insert 1528
+insert 1554
+insert 1539
+insert 1468
+insert 3185
+insert 1182
+insert 1479
+remove 1495
+insert 1507
+insert 1518
+insert 384
+insert 1301
+insert 1240
+insert 1254
+insert 112
+insert 75
+insert 72
+remove 1494
+insert 138
+insert 89
+insert 2840
+insert 200
+insert 1191
+insert 168
+insert 167
+insert 890
+insert 944
+insert 920
+insert 868
+insert 839
+insert 1146
+insert 1012
+insert 2255
+insert 2815
+insert 1563
+insert 2579
+insert 1545
+insert 2583
+insert 3089
+insert 1785
+insert 2273
+remove 1370
+insert 1739
+insert 3098
+insert 192
+insert 2242
+insert 172
+insert 3091
+insert 935
+insert 221
+insert 251
+insert 178
+insert 1751
+insert 369
+insert 929
+insert 82
+insert 436
+insert 276
+insert 244
+insert 387
+remove 2216
+insert 410
+insert 3053
+insert 1498
+insert 2826
+insert 366
+remove 1238
+insert 289
+insert 972
+insert 1761
+insert 441
+insert 361
+insert 401
+insert 2306
+insert 2390
+insert 2291
+insert 1199
+insert 982
+insert 270
+remove 1399
+insert 1551
+remove 1346
+insert 2169
+insert 986
+insert 394
+insert 3116
+insert 2272
+insert 3119
+insert 1494
+insert 3236
+insert 916
+insert 896
+insert 3152
+insert 3248
+insert 2132
+insert 2131
+insert 2155
+insert 1338
+insert 1043
+insert 954
+insert 3167
+insert 1562
+insert 3134
+insert 1571
+insert 898
+insert 1540
+insert 930
+insert 949
+insert 863
+insert 507
+insert 923
+insert 840
+insert 914
+insert 899
+insert 2184
+insert 2204
+insert 2128
+insert 2160
+insert 2173
+insert 2191
+insert 2171
+insert 2177
+insert 2179
+insert 1549
+insert 1569
+insert 1500
+remove 1389
+insert 1823
+insert 1812
+remove 1371
+remove 1375
+insert 1792
+insert 831
+insert 1831
+insert 1516
+insert 1822
+insert 1846
+insert 125
+insert 1799
+insert 129
+insert 1825
+insert 1836
+insert 210
+insert 188
+insert 1499
+insert 174
+insert 99
+insert 1833
+insert 1028
+insert 1024
+insert 1362
+insert 217
+insert 952
+insert 838
+insert 835
+insert 886
+insert 904
+insert 1041
+insert 1047
+insert 124
+insert 179
+insert 134
+insert 173
+insert 209
+insert 215
+insert 901
+insert 19
+insert 141
+remove 1387
+insert 177
+insert 160
+insert 157
+insert 998
+insert 16
+insert 1789
+insert 1748
+insert 1784
+insert 1818
+insert 908
+insert 950
+insert 939
+insert 936
+insert 1531
+insert 919
+insert 905
+insert 937
+insert 582
+insert 664
+insert 597
+insert 906
+insert 549
+insert 506
+insert 575
+insert 479
+insert 947
+insert 76
+insert 577
+insert 56
+insert 51
+insert 623
+insert 652
+remove 1364
+insert 1537
+remove 1396
+insert 1434
+insert 1415
+insert 1346
+insert 1399
+insert 1335
+insert 1513
+remove 1403
+insert 1536
+remove 1409
+remove 1366
+insert 1391
+remove 1287
+insert 1280
+remove 1503
+remove 1406
+insert 1383
+insert 2549
+insert 2538
+insert 2495
+insert 1268
+remove 1390
+insert 2527
+remove 1493
+remove 1394
+insert 1255
+remove 1315
+remove 1311
+insert 1276
+insert 1331
+remove 1317
+insert 1400
+remove 1354
+insert 1322
+insert 1357
+insert 1481
+remove 1405
+insert 285
+insert 288
+insert 261
+insert 640
+insert 632
+insert 525
+insert 266
+insert 962
+insert 629
+insert 378
+insert 380
+remove 1359
+insert 1351
+insert 587
+insert 368
+insert 376
+insert 1333
+insert 1050
+insert 1040
+insert 337
+insert 1352
+insert 3196
+insert 1997
+insert 326
+remove 2228
+insert 17
+insert 320
+insert 86
+remove 1502
+insert 1135
+remove 1298
+insert 132
+insert 156
+insert 1361
+insert 1215
+insert 2775
+insert 122
+insert 2816
+insert 2800
+insert 2799
+insert 1208
+insert 1285
+insert 1329
+insert 1201
+insert 1951
+insert 2786
+insert 2783
+insert 2822
+insert 2827
+insert 2805
+insert 2956
+insert 2862
+insert 2988
+insert 953
+insert 2791
+insert 2844
+insert 2777
+insert 2846
+insert 2785
+insert 2951
+insert 430
+insert 375
+insert 104
+insert 154
+insert 427
+insert 373
+insert 1025
+insert 146
+insert 379
+insert 894
+insert 867
+insert 969
+insert 159
+insert 887
+insert 876
+insert 176
+insert 1027
+insert 1119
+insert 994
+insert 917
+insert 1349
+insert 153
+insert 1033
+insert 875
+insert 0
+insert 1336
+insert 912
+remove 1357
+insert 501
+insert 206
+insert 1029
+insert 1081
+insert 152
+insert 1115
+insert 545
+insert 1044
+insert 1145
+insert 909
+insert 472
+insert 1056
+insert 845
+insert 881
+insert 1112
+insert 915
+insert 536
+insert 383
+insert 544
+insert 163
+insert 558
+remove 1343
+insert 1292
+insert 103
+insert 1321
+insert 554
+insert 825
+insert 559
+insert 539
+insert 242
+insert 556
+insert 235
+insert 245
+insert 133
+insert 374
+insert 161
+insert 34
+insert 1143
+insert 1136
+insert 1059
+insert 1087
+insert 1155
+insert 143
+insert 158
+insert 123
+insert 115
+insert 166
+insert 893
+insert 94
+insert 71
+insert 88
+insert 18
+remove 1314
+insert 238
+insert 260
+insert 1130
+insert 984
+insert 1152
+insert 1079
+insert 557
+insert 223
+insert 207
+insert 257
+insert 281
+insert 292
+insert 532
+insert 197
+insert 274
+insert 497
+insert 2655
+insert 267
+insert 686
+insert 212
+insert 854
+insert 880
+insert 325
+insert 673
+insert 216
+insert 196
+insert 7
+insert 782
+insert 2669
+insert 63
+insert 744
+insert 1
+insert 277
+insert 214
+insert 364
+insert 148
+insert 57
+remove 1274
+insert 727
+insert 787
+insert 772
+insert 20
+insert 293
+insert 773
+insert 1010
+insert 183
+insert 873
+insert 617
+insert 836
+insert 878
+insert 451
+insert 313
+insert 607
+insert 1102
+insert 386
+insert 424
+insert 404
+insert 650
+insert 826
+insert 1162
+insert 794
+insert 334
+insert 101
+insert 345
+insert 110
+insert 865
+insert 226
+insert 667
+insert 1088
+insert 651
+insert 273
+insert 870
+insert 852
+insert 189
+insert 1045
+insert 1031
+insert 102
+insert 347
+insert 54
+remove 1345
+insert 354
+insert 631
+insert 1159
+insert 837
+insert 150
+insert 105
+insert 583
+insert 657
+insert 606
+insert 624
+insert 2
+insert 9
+insert 601
+insert 248
+insert 360
+insert 663
+insert 777
+insert 315
+insert 615
+insert 765
+insert 750
+insert 593
+insert 595
+insert 548
+insert 979
+insert 1049
+insert 1157
+insert 1148
+insert 322
+insert 332
+insert 1051
+insert 1013
+insert 349
+insert 552
+insert 390
+insert 974
+insert 1080
+insert 392
+insert 1325
+remove 1328
+insert 1054
+insert 1034
+insert 478
+insert 287
+insert 1302
+insert 805
+insert 492
+insert 24
+insert 529
+insert 533
+insert 165
+insert 79
+insert 584
+insert 884
+insert 815
+insert 776
+insert 534
+insert 49
+insert 524
+insert 874
+insert 264
+insert 482
+insert 495
+insert 505
+insert 810
+insert 885
+insert 64
+insert 1364
+insert 726
+insert 858
+insert 249
+insert 757
+insert 743
+insert 358
+insert 921
+insert 521
+insert 739
+insert 857
+insert 318
+insert 282
+insert 951
+insert 1246
+insert 745
+insert 26
+insert 21
+insert 58
+remove 1352
+insert 302
+insert 1270
+insert 1058
+insert 8
+insert 803
+insert 316
+insert 770
+insert 262
+insert 27
+insert 611
+insert 1289
+insert 73
+insert 23
+insert 856
+insert 527
+insert 52
+insert 1253
+insert 760
+insert 120
+insert 807
+insert 643
+insert 116
+insert 362
+insert 646
+insert 668
+insert 642
+insert 227
+insert 1104
+insert 680
+insert 812
+insert 793
+insert 823
+insert 717
+insert 755
+insert 1781
+insert 1065
+insert 636
+insert 290
+insert 767
+insert 1261
+insert 1283
+insert 1295
+remove 1351
+remove 1341
+remove 1339
+insert 1262
+insert 397
+insert 408
+insert 445
+insert 746
+insert 437
+insert 710
+insert 723
+insert 403
+insert 715
+insert 434
+insert 514
+insert 681
+insert 540
+insert 463
+insert 490
+insert 438
+insert 806
+insert 420
+insert 741
+insert 714
+insert 692
+insert 458
+insert 447
+insert 448
+insert 753
+insert 412
+insert 728
+insert 718
+remove 1296
+remove 1276
+insert 735
+insert 1209
+insert 1036
+insert 1124
+insert 722
+insert 748
+insert 742
+insert 724
+insert 1181
+insert 751
+insert 733
+insert 416
+insert 1170
+insert 446
+insert 2881
+remove 1358
+remove 1288
+insert 449
+insert 40
+insert 33
+insert 130
+insert 1456
+insert 1418
+remove 1489
+insert 1455
+insert 1453
+remove 1473
+remove 1478
+remove 1484
+insert 2154
+insert 1470
+insert 1462
+remove 1487
+insert 1909
+insert 1914
+insert 1465
+insert 1853
+remove 1410
+insert 2252
+remove 1483
+remove 1467
+remove 1316
+insert 948
+insert 1296
+insert 1750
+insert 171
+insert 1238
+insert 749
+insert 697
+insert 219
+insert 721
+insert 828
+insert 2327
+insert 737
+insert 1760
+insert 1775
+insert 1782
+insert 685
+insert 1778
+insert 731
+insert 2352
+insert 706
+insert 1762
+insert 679
+insert 732
+insert 1767
+insert 2325
+insert 2208
+insert 1788
+insert 1772
+insert 1771
+insert 1765
+insert 752
+insert 674
+insert 690
+insert 2353
+insert 1980
+insert 2142
+insert 2286
+insert 2200
+insert 1954
+insert 2338
+insert 453
+insert 411
+insert 2339
+insert 2192
+insert 1749
+insert 2334
+insert 1993
+insert 2015
+insert 428
+insert 2322
+insert 433
+insert 2178
+insert 1727
+remove 1488
+remove 1363
+insert 1916
+insert 2335
+insert 1311
+remove 1461
+insert 2332
+insert 2319
+insert 2930
+insert 1278
+insert 2753
+insert 1754
+insert 2321
+insert 2851
+insert 2214
+insert 2172
+insert 2170
+insert 1314
+remove 1283
+insert 1786
+insert 250
+remove 1355
+insert 1317
+insert 1958
+remove 1325
+insert 2839
+insert 1074
+insert 1752
+insert 2124
+remove 2259
+insert 1457
+insert 1487
+insert 2146
+insert 2138
+remove 2257
+insert 2150
+insert 2241
+insert 1937
+insert 2148
+insert 1730
+insert 1308
+insert 1239
+remove 1165
+insert 2183
+insert 1920
+insert 834
+insert 764
+insert 853
+remove 2258
+insert 2233
+insert 1842
+insert 1943
+insert 847
+insert 783
+insert 1935
+insert 816
+insert 2186
+insert 2126
+insert 2149
+insert 792
+insert 1927
+insert 1315
+insert 1912
+remove 1280
+insert 1902
+insert 1915
+insert 820
+remove 1324
+insert 1923
+insert 1390
+insert 813
+insert 695
+insert 817
+insert 1926
+insert 1895
+insert 705
+insert 1389
+insert 790
+insert 682
+remove 1491
+insert 786
+insert 796
+insert 804
+insert 800
+insert 1950
+insert 811
+insert 771
+insert 435
+insert 452
+insert 464
+insert 432
+insert 784
+insert 900
+remove 1957
+insert 417
+insert 425
+insert 2695
+insert 2796
+insert 415
+insert 48
+insert 801
+insert 802
+insert 1069
+insert 603
+insert 460
+insert 422
+insert 439
+insert 774
+remove 1448
+insert 779
+insert 579
+insert 809
+insert 762
+insert 778
+insert 798
+insert 622
+insert 797
+insert 819
+insert 814
+insert 565
+insert 780
+insert 600
+insert 781
+insert 789
+insert 763
+insert 768
+insert 818
+remove 1408
+insert 1794
+insert 2350
+insert 581
+insert 2384
+insert 590
+insert 2349
+insert 466
+insert 2351
+insert 2394
+insert 608
+insert 598
+insert 519
+insert 633
+insert 485
+insert 627
+insert 2293
+insert 2253
+insert 2817
+insert 2366
+insert 500
+insert 2600
+insert 2627
+insert 2310
+remove 980
+insert 1110
+insert 1757
+insert 512
+insert 568
+insert 462
+insert 498
+remove 1342
+insert 2808
+insert 496
+insert 1258
+insert 1120
+insert 1150
+insert 91
+insert 1316
+insert 47
+insert 1312
+insert 1324
+insert 513
+insert 499
+insert 481
+insert 465
+insert 483
+insert 474
+insert 487
+insert 491
+insert 522
+insert 502
+insert 934
+remove 1300
+insert 510
+remove 1297
+remove 1273
+insert 1946
+insert 476
+insert 97
+insert 1138
+insert 503
+insert 1357
+insert 489
+insert 1147
+insert 42
+insert 1267
+insert 1248
+remove 1329
+insert 486
+insert 520
+insert 1082
+remove 1272
+insert 1068
+remove 1360
+insert 50
+remove 987
+remove 1364
+remove 1314
+insert 1283
+insert 1280
+insert 84
+remove 1344
+remove 1318
+insert 1341
+insert 59
+insert 471
+insert 1077
+insert 473
+insert 1070
+remove 1299
+remove 1315
+remove 1282
+insert 1276
+insert 3132
+remove 1347
+insert 3124
+insert 516
+insert 488
+remove 1362
+insert 300
+insert 1891
+insert 312
+insert 1118
+insert 475
+insert 359
+insert 1362
+insert 309
+insert 1067
+insert 1830
+insert 3027
+insert 2052
+insert 1816
+insert 2027
+insert 1098
+insert 1855
+insert 1078
+insert 1817
+insert 484
+insert 1142
+remove 962
+insert 1062
+insert 2563
+insert 1839
+insert 2576
+insert 1072
+insert 1107
+insert 992
+insert 1020
+insert 355
+insert 343
+insert 1094
+insert 1064
+insert 1125
+insert 1808
+insert 1236
+insert 1071
+insert 1103
+insert 1233
+insert 1840
+insert 1850
+remove 1235
+insert 1089
+insert 1815
+insert 1763
+insert 976
+insert 1773
+insert 1091
+insert 1018
+insert 1083
+insert 1075
+insert 1114
+insert 35
+insert 1123
+insert 1106
+insert 1834
+insert 1017
+insert 1117
+remove 1269
+insert 1111
+insert 1247
+insert 993
+insert 604
+insert 1849
+insert 1820
+insert 1105
+insert 2336
+insert 1095
+insert 569
+insert 2320
+insert 341
+insert 3249
+insert 983
+insert 308
+insert 306
+insert 2551
+insert 564
+remove 1311
+insert 1011
+insert 2311
+insert 1841
+insert 2871
+insert 2865
+insert 610
+insert 2828
+insert 609
+insert 2829
+insert 2834
+insert 2837
+insert 2836
+insert 321
+insert 2861
+insert 2324
+insert 2875
+insert 995
+insert 585
+insert 570
+insert 616
+insert 1906
+insert 1900
+insert 2874
+insert 2312
+insert 2787
+insert 80
+insert 2372
+insert 970
+insert 588
+insert 95
+insert 1126
+insert 324
+insert 602
+insert 571
+insert 589
+insert 561
+insert 586
+insert 342
+insert 61
+insert 2835
+insert 36
+insert 2879
+insert 2856
+insert 317
+insert 39
+insert 572
+insert 1009
+insert 654
+insert 625
+insert 2857
+insert 2830
+insert 2854
+insert 2345
+insert 2878
+insert 614
+insert 1073
+insert 1096
+insert 580
+remove 674
+insert 1005
+remove 681
+insert 635
+insert 1007
+insert 2491
+insert 2606
+insert 298
+remove 706
+insert 356
+insert 645
+insert 2595
+remove 698
+remove 692
+insert 2624
+insert 630
+insert 2642
+remove 668
+insert 2864
+insert 37
+insert 3245
+insert 2849
+insert 980
+insert 2877
+insert 2868
+insert 977
+insert 2316
+insert 2842
+insert 67
+remove 659
+insert 2500
+insert 336
+insert 81
+insert 3192
+remove 720
+remove 1164
+insert 3244
+insert 2824
+insert 2543
+insert 2295
+insert 703
+insert 2328
+insert 348
+insert 323
+insert 2823
+insert 2855
+insert 566
+insert 562
+insert 2831
+insert 2654
+insert 330
+insert 596
+insert 2700
+insert 981
+insert 346
+insert 299
+insert 591
+insert 38
+insert 1970
+remove 1268
+insert 2355
+insert 2507
+insert 2079
+insert 43
+insert 74
+insert 301
+insert 357
+insert 327
+insert 351
+insert 1990
+insert 2323
+insert 297
+insert 2300
+insert 594
+insert 2330
+insert 344
+insert 965
+insert 967
+insert 307
+insert 304
+insert 340
+insert 3241
+insert 3284
+insert 2673
+remove 1090
+insert 319
+insert 87
+insert 2699
+insert 305
+insert 85
+insert 3283
+insert 77
+insert 1008
+insert 93
+insert 2671
+insert 2530
+insert 684
+insert 78
+insert 303
+insert 2661
+insert 674
+insert 707
+insert 3169
+remove 1234
+remove 1104
+insert 2020
+insert 996
+insert 68
+insert 92
+insert 69
+insert 963
+insert 70
+insert 1977
+insert 2033
+insert 2524
+insert 1004
+insert 2450
+insert 60
+insert 701
+insert 44
+insert 671
+insert 1097
+insert 1099
+insert 720
+insert 962
+insert 1021
+insert 702
+insert 997
+insert 2709
+insert 2462
+remove 1096
+insert 2370
+insert 2298
+insert 2676
+remove 1199
+insert 2414
+insert 687
+insert 2080
+remove 1095
+insert 706
+insert 1100
+insert 1101
+insert 2668
+insert 1127
+insert 2046
+insert 2060
+insert 2044
+remove 1110
+insert 699
+insert 2714
+insert 2472
+insert 2014
+remove 1120
+insert 3234
+insert 2428
+insert 3250
+insert 1994
+insert 700
+insert 3233
+insert 964
+insert 2998
+insert 3193
+insert 2011
+insert 709
+insert 2030
+insert 3257
+insert 3264
+insert 3216
+insert 1000
+insert 3276
+insert 2717
+insert 2660
+insert 975
+remove 1087
+insert 3278
+insert 2533
+insert 3268
+insert 990
+insert 1001
+insert 2487
+insert 973
+insert 2034
+remove 1111
+insert 2517
+insert 1063
+insert 3306
+insert 696
+insert 1061
+insert 2552
+remove 1119
+insert 2432
+insert 2707
+insert 2326
+insert 716
+insert 660
+remove 1122
+insert 3271
+insert 2424
+insert 3013
+insert 3238
+insert 3258
+insert 692
+insert 3203
+insert 2294
+insert 1979
+insert 2997
+insert 2391
+insert 2029
+insert 1630
+insert 956
+insert 2505
+insert 3215
+insert 1641
+insert 3302
+insert 2367
+insert 676
+insert 2381
+insert 666
+insert 675
+insert 1015
+remove 1112
+insert 2071
+insert 3043
+insert 3301
+insert 2475
+insert 698
+insert 2031
+insert 3048
+insert 2644
+insert 2419
+insert 2059
+insert 2550
+insert 3046
+insert 2067
+insert 3312
+insert 3296
+insert 3006
+insert 2965
+insert 2460
+insert 2652
+insert 2758
+insert 677
+insert 1959
+insert 713
+insert 2359
+insert 2694
+insert 2674
+insert 2703
+insert 1684
+insert 2713
+insert 1653
+insert 2964
+insert 2389
+insert 3086
+insert 3204
+insert 3021
+insert 2639
+insert 2616
+insert 3246
+insert 3195
+insert 2795
+insert 3036
+insert 3207
+insert 2630
+insert 1682
+insert 2670
+insert 2085
+insert 3297
+insert 711
+insert 2712
+insert 2477
+insert 2681
+insert 659
+insert 2706
+insert 2683
+insert 3219
+insert 3282
+insert 3101
+insert 3109
+insert 2049
+insert 2007
+insert 3094
+insert 2716
+insert 2451
+insert 2922
+insert 2081
+insert 1629
+insert 2075
+insert 2483
+insert 2025
+insert 2691
+insert 693
+insert 2602
+insert 2513
+insert 2662
+insert 681
+insert 662
+insert 678
+insert 2986
+insert 3292
+insert 2492
+insert 3251
+insert 661
+insert 1988
+insert 2032
+insert 689
+insert 708
+insert 2944
+insert 694
+insert 2697
+insert 2604
+insert 2620
+insert 2435
+insert 3029
+insert 2970
+insert 2974
+insert 2607
+insert 2766
+insert 3061
+insert 3103
+insert 1674
+insert 1673
+insert 2648
+insert 3009
+insert 2051
+insert 1852
+insert 2082
+insert 2008
+insert 3079
+insert 1971
+insert 3212
+insert 2084
+insert 1656
+insert 665
+insert 1685
+insert 2990
+insert 2984
+insert 2942
+insert 2536
+insert 3190
+insert 2062
+insert 3188
+insert 3073
+insert 3102
+insert 3096
+insert 2987
+insert 3092
+insert 2760
+insert 2631
+insert 2066
+insert 2943
+insert 2926
+insert 2950
+insert 2058
+insert 2589
+insert 2598
+insert 3131
+insert 3120
+insert 1832
+insert 2363
+insert 2083
+insert 2933
+insert 2967
+insert 3082
+insert 3100
+insert 2415
+insert 2776
+insert 1983
+insert 1813
+insert 3099
+insert 2493
+insert 2597
+insert 2537
+insert 2010
+insert 1643
+insert 3191
+insert 2989
+insert 2937
+insert 3149
+insert 3057
+insert 2013
+insert 2929
+insert 3176
+insert 2636
+insert 3163
+remove 1163
+insert 1984
+insert 2000
+insert 2608
+insert 2043
+insert 3137
+insert 3189
+insert 3263
+insert 2994
+insert 2498
+insert 2596
+insert 1800
+insert 3259
+insert 1759
+insert 2762
+insert 3023
+insert 3017
+insert 2955
+insert 2592
+insert 2511
+insert 3005
+insert 3097
+insert 3121
+insert 2629
+insert 2534
+insert 2927
+insert 2544
+insert 3055
+insert 2640
+insert 1821
+insert 3300
+insert 3153
+insert 2954
+insert 3275
+insert 3227
+insert 3168
+insert 3225
+insert 1801
+insert 1744
+insert 3154
+insert 3143
+insert 2452
+insert 2614
+insert 3044
+insert 2611
+insert 2409
+insert 2407
+insert 3070
+insert 2545
+insert 2953
+insert 3243
+insert 2947
+insert 2626
+insert 2398
+insert 2392
+insert 3222
+insert 3108
+insert 2377
+insert 3177
+insert 3150
+insert 2490
+insert 2456
+insert 2478
+insert 3155
+insert 2535
+insert 2501
+insert 2380
+insert 3232
+insert 1670
+remove 1129
+insert 2985
+insert 2408
+insert 2473
+insert 3122
+insert 1991
+insert 3067
+insert 1963
+insert 2423
+insert 2928
+insert 2458
+insert 2382
+insert 2993
+insert 3019
+insert 2489
+insert 3139
+insert 3068
+insert 3088
+insert 1961
+insert 2003
+insert 3146
+insert 3066
+insert 3173
+insert 3162
+insert 2016
+insert 2486
+insert 3042
+insert 1995
+insert 2444
+insert 2457
+insert 3069
+insert 3157
+insert 3186
+insert 2939
+insert 1646
+insert 2470
+insert 2982
+insert 2547
+insert 2780
+insert 3265
+insert 3178
+insert 1681
+insert 2788
+insert 3218
+insert 2012
+insert 3253
+insert 3252
+insert 1668
+insert 2789
+insert 3041
+insert 2938
+insert 2754
+insert 2002
+insert 1644
+insert 2343
+insert 3180
+insert 1675
+insert 3230
+insert 1631
+insert 2504
+insert 2767
+insert 3126
+insert 3254
+insert 3310
+insert 2466
+insert 3141
+insert 2770
+insert 3311
+insert 1907
+insert 3286
+insert 3110
+insert 3051
+insert 1957
+insert 2772
+insert 1944
+insert 1989
+insert 3256
+insert 2529
+insert 2317
+insert 3040
+insert 1658
+insert 1893
+insert 3104
+insert 2810
+insert 2482
+insert 2979
+insert 3058
+insert 2474
+insert 1671
+insert 1659
+insert 3007
+insert 2541
+insert 966
+insert 2459
+insert 2546
+insert 1677
+insert 2496
+insert 1635
+insert 1650
+insert 1680
+insert 2403
+insert 3106
+insert 2426
+insert 3080
+insert 2447
+insert 3022
+insert 3210
+insert 2304
+insert 2996
+insert 2465
+insert 2449
+insert 2371
+insert 2765
+insert 2514
+insert 3197
+insert 2427
+remove 1086
diff --git a/vendor/im-rc/src/proptest.rs b/vendor/im-rc/src/proptest.rs
new file mode 100644
index 000000000..9180ef273
--- /dev/null
+++ b/vendor/im-rc/src/proptest.rs
@@ -0,0 +1,164 @@
+//! Proptest strategies.
+//!
+//! These are only available when using the `proptest` feature flag.
+
+use crate::{HashMap, HashSet, OrdMap, OrdSet, Vector};
+use ::proptest::collection::vec;
+use ::proptest::strategy::{BoxedStrategy, Strategy, ValueTree};
+use std::hash::Hash;
+use std::iter::FromIterator;
+use std::ops::Range;
+
+/// A strategy for generating a [`Vector`][Vector] of a certain size.
+///
+/// # Examples
+///
+/// ```rust,no_run
+/// # use ::proptest::proptest;
+/// proptest! {
+/// #[test]
+/// fn proptest_a_vector(ref l in vector(".*", 10..100)) {
+/// assert!(l.len() < 100);
+/// assert!(l.len() >= 10);
+/// }
+/// }
+/// ```
+///
+/// [Vector]: ../struct.Vector.html
+pub fn vector<A: Strategy + 'static>(
+ element: A,
+ size: Range<usize>,
+) -> BoxedStrategy<Vector<<A::Tree as ValueTree>::Value>>
+where
+ <A::Tree as ValueTree>::Value: Clone,
+{
+ vec(element, size).prop_map(Vector::from_iter).boxed()
+}
+
+/// A strategy for an [`OrdMap`][OrdMap] of a given size.
+///
+/// # Examples
+///
+/// ```rust,no_run
+/// # use ::proptest::proptest;
+/// proptest! {
+/// #[test]
+/// fn proptest_works(ref m in ord_map(0..9999, ".*", 10..100)) {
+/// assert!(m.len() < 100);
+/// assert!(m.len() >= 10);
+/// }
+/// }
+/// ```
+///
+/// [OrdMap]: ../struct.OrdMap.html
+pub fn ord_map<K: Strategy + 'static, V: Strategy + 'static>(
+ key: K,
+ value: V,
+ size: Range<usize>,
+) -> BoxedStrategy<OrdMap<<K::Tree as ValueTree>::Value, <V::Tree as ValueTree>::Value>>
+where
+ <K::Tree as ValueTree>::Value: Ord + Clone,
+ <V::Tree as ValueTree>::Value: Clone,
+{
+ ::proptest::collection::vec((key, value), size.clone())
+ .prop_map(OrdMap::from)
+ .prop_filter("OrdMap minimum size".to_owned(), move |m| {
+ m.len() >= size.start
+ })
+ .boxed()
+}
+
+/// A strategy for an [`OrdSet`][OrdSet] of a given size.
+///
+/// # Examples
+///
+/// ```rust,no_run
+/// # use ::proptest::proptest;
+/// proptest! {
+/// #[test]
+/// fn proptest_a_set(ref s in ord_set(".*", 10..100)) {
+/// assert!(s.len() < 100);
+/// assert!(s.len() >= 10);
+/// }
+/// }
+/// ```
+///
+/// [OrdSet]: ../struct.OrdSet.html
+pub fn ord_set<A: Strategy + 'static>(
+ element: A,
+ size: Range<usize>,
+) -> BoxedStrategy<OrdSet<<A::Tree as ValueTree>::Value>>
+where
+ <A::Tree as ValueTree>::Value: Ord + Clone,
+{
+ ::proptest::collection::vec(element, size.clone())
+ .prop_map(OrdSet::from)
+ .prop_filter("OrdSet minimum size".to_owned(), move |s| {
+ s.len() >= size.start
+ })
+ .boxed()
+}
+
+/// A strategy for a [`HashMap`][HashMap] of a given size.
+///
+/// # Examples
+///
+/// ```rust,no_run
+/// # use ::proptest::proptest;
+/// proptest! {
+/// #[test]
+/// fn proptest_works(ref m in hash_map(0..9999, ".*", 10..100)) {
+/// assert!(m.len() < 100);
+/// assert!(m.len() >= 10);
+/// }
+/// }
+/// ```
+///
+/// [HashMap]: ../struct.HashMap.html
+pub fn hash_map<K: Strategy + 'static, V: Strategy + 'static>(
+ key: K,
+ value: V,
+ size: Range<usize>,
+) -> BoxedStrategy<HashMap<<K::Tree as ValueTree>::Value, <V::Tree as ValueTree>::Value>>
+where
+ <K::Tree as ValueTree>::Value: Hash + Eq + Clone,
+ <V::Tree as ValueTree>::Value: Clone,
+{
+ ::proptest::collection::vec((key, value), size.clone())
+ .prop_map(HashMap::from)
+ .prop_filter("Map minimum size".to_owned(), move |m| {
+ m.len() >= size.start
+ })
+ .boxed()
+}
+
+/// A strategy for a [`HashSet`][HashSet] of a given size.
+///
+/// # Examples
+///
+/// ```rust,no_run
+/// # use ::proptest::proptest;
+/// proptest! {
+/// #[test]
+/// fn proptest_a_set(ref s in hash_set(".*", 10..100)) {
+/// assert!(s.len() < 100);
+/// assert!(s.len() >= 10);
+/// }
+/// }
+/// ```
+///
+/// [HashSet]: ../struct.HashSet.html
+pub fn hash_set<A: Strategy + 'static>(
+ element: A,
+ size: Range<usize>,
+) -> BoxedStrategy<HashSet<<A::Tree as ValueTree>::Value>>
+where
+ <A::Tree as ValueTree>::Value: Hash + Eq + Clone,
+{
+ ::proptest::collection::vec(element, size.clone())
+ .prop_map(HashSet::from)
+ .prop_filter("HashSet minimum size".to_owned(), move |s| {
+ s.len() >= size.start
+ })
+ .boxed()
+}
diff --git a/vendor/im-rc/src/quickcheck.rs b/vendor/im-rc/src/quickcheck.rs
new file mode 100644
index 000000000..3faade751
--- /dev/null
+++ b/vendor/im-rc/src/quickcheck.rs
@@ -0,0 +1,43 @@
+use crate::{HashMap, HashSet, OrdMap, OrdSet, Vector};
+use ::quickcheck::{Arbitrary, Gen};
+use std::hash::{BuildHasher, Hash};
+use std::iter::FromIterator;
+
+impl<A: Arbitrary + Sync + Clone> Arbitrary for Vector<A> {
+ fn arbitrary(g: &mut Gen) -> Self {
+ Vector::from_iter(Vec::<A>::arbitrary(g))
+ }
+}
+
+impl<K: Ord + Clone + Arbitrary + Sync, V: Clone + Arbitrary + Sync> Arbitrary for OrdMap<K, V> {
+ fn arbitrary(g: &mut Gen) -> Self {
+ OrdMap::from_iter(Vec::<(K, V)>::arbitrary(g))
+ }
+}
+
+impl<A: Ord + Clone + Arbitrary + Sync> Arbitrary for OrdSet<A> {
+ fn arbitrary(g: &mut Gen) -> Self {
+ OrdSet::from_iter(Vec::<A>::arbitrary(g))
+ }
+}
+
+impl<A, S> Arbitrary for HashSet<A, S>
+where
+ A: Hash + Eq + Arbitrary + Sync,
+ S: BuildHasher + Default + Send + Sync + 'static,
+{
+ fn arbitrary(g: &mut Gen) -> Self {
+ HashSet::from_iter(Vec::<A>::arbitrary(g))
+ }
+}
+
+impl<K, V, S> Arbitrary for HashMap<K, V, S>
+where
+ K: Hash + Eq + Arbitrary + Sync,
+ V: Arbitrary + Sync,
+ S: BuildHasher + Default + Send + Sync + 'static,
+{
+ fn arbitrary(g: &mut Gen) -> Self {
+ HashMap::from(Vec::<(K, V)>::arbitrary(g))
+ }
+}
diff --git a/vendor/im-rc/src/ser.rs b/vendor/im-rc/src/ser.rs
new file mode 100644
index 000000000..d9a35e5f3
--- /dev/null
+++ b/vendor/im-rc/src/ser.rs
@@ -0,0 +1,293 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use serde::de::{Deserialize, Deserializer, MapAccess, SeqAccess, Visitor};
+use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
+use std::fmt;
+use std::hash::{BuildHasher, Hash};
+use std::marker::PhantomData;
+use std::ops::Deref;
+
+use crate::hashmap::HashMap;
+use crate::hashset::HashSet;
+use crate::ordmap::OrdMap;
+use crate::ordset::OrdSet;
+use crate::vector::Vector;
+
+struct SeqVisitor<'de, S, A>
+where
+ S: From<Vec<A>>,
+ A: Deserialize<'de>,
+{
+ phantom_s: PhantomData<S>,
+ phantom_a: PhantomData<A>,
+ phantom_lifetime: PhantomData<&'de ()>,
+}
+
+impl<'de, S, A> SeqVisitor<'de, S, A>
+where
+ S: From<Vec<A>>,
+ A: Deserialize<'de>,
+{
+ pub(crate) fn new() -> SeqVisitor<'de, S, A> {
+ SeqVisitor {
+ phantom_s: PhantomData,
+ phantom_a: PhantomData,
+ phantom_lifetime: PhantomData,
+ }
+ }
+}
+
+impl<'de, S, A> Visitor<'de> for SeqVisitor<'de, S, A>
+where
+ S: From<Vec<A>>,
+ A: Deserialize<'de>,
+{
+ type Value = S;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a sequence")
+ }
+
+ fn visit_seq<Access>(self, mut access: Access) -> Result<Self::Value, Access::Error>
+ where
+ Access: SeqAccess<'de>,
+ {
+ let mut v: Vec<A> = match access.size_hint() {
+ None => Vec::new(),
+ Some(l) => Vec::with_capacity(l),
+ };
+ while let Some(i) = access.next_element()? {
+ v.push(i)
+ }
+ Ok(From::from(v))
+ }
+}
+
+struct MapVisitor<'de, S, K, V>
+where
+ S: From<Vec<(K, V)>>,
+ K: Deserialize<'de>,
+ V: Deserialize<'de>,
+{
+ phantom_s: PhantomData<S>,
+ phantom_k: PhantomData<K>,
+ phantom_v: PhantomData<V>,
+ phantom_lifetime: PhantomData<&'de ()>,
+}
+
+impl<'de, S, K, V> MapVisitor<'de, S, K, V>
+where
+ S: From<Vec<(K, V)>>,
+ K: Deserialize<'de>,
+ V: Deserialize<'de>,
+{
+ pub(crate) fn new() -> MapVisitor<'de, S, K, V> {
+ MapVisitor {
+ phantom_s: PhantomData,
+ phantom_k: PhantomData,
+ phantom_v: PhantomData,
+ phantom_lifetime: PhantomData,
+ }
+ }
+}
+
+impl<'de, S, K, V> Visitor<'de> for MapVisitor<'de, S, K, V>
+where
+ S: From<Vec<(K, V)>>,
+ K: Deserialize<'de>,
+ V: Deserialize<'de>,
+{
+ type Value = S;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str("a sequence")
+ }
+
+ fn visit_map<Access>(self, mut access: Access) -> Result<Self::Value, Access::Error>
+ where
+ Access: MapAccess<'de>,
+ {
+ let mut v: Vec<(K, V)> = match access.size_hint() {
+ None => Vec::new(),
+ Some(l) => Vec::with_capacity(l),
+ };
+ while let Some(i) = access.next_entry()? {
+ v.push(i)
+ }
+ Ok(From::from(v))
+ }
+}
+
+// Set
+
+impl<'de, A: Deserialize<'de> + Ord + Clone> Deserialize<'de> for OrdSet<A> {
+ fn deserialize<D>(des: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ des.deserialize_seq(SeqVisitor::new())
+ }
+}
+
+impl<A: Ord + Clone + Serialize> Serialize for OrdSet<A> {
+ fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut s = ser.serialize_seq(Some(self.len()))?;
+ for i in self.iter() {
+ s.serialize_element(i.deref())?;
+ }
+ s.end()
+ }
+}
+
+// Map
+
+impl<'de, K: Deserialize<'de> + Ord + Clone, V: Deserialize<'de> + Clone> Deserialize<'de>
+ for OrdMap<K, V>
+{
+ fn deserialize<D>(des: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ des.deserialize_map(MapVisitor::<'de, OrdMap<K, V>, K, V>::new())
+ }
+}
+
+impl<K: Serialize + Ord + Clone, V: Serialize + Clone> Serialize for OrdMap<K, V> {
+ fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut s = ser.serialize_map(Some(self.len()))?;
+ for (k, v) in self.iter() {
+ s.serialize_entry(k.deref(), v.deref())?;
+ }
+ s.end()
+ }
+}
+
+// HashMap
+
+impl<'de, K, V, S> Deserialize<'de> for HashMap<K, V, S>
+where
+ K: Deserialize<'de> + Hash + Eq + Clone,
+ V: Deserialize<'de> + Clone,
+ S: BuildHasher + Default,
+{
+ fn deserialize<D>(des: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ des.deserialize_map(MapVisitor::<'de, HashMap<K, V, S>, K, V>::new())
+ }
+}
+
+impl<K, V, S> Serialize for HashMap<K, V, S>
+where
+ K: Serialize + Hash + Eq + Clone,
+ V: Serialize + Clone,
+ S: BuildHasher + Default,
+{
+ fn serialize<Ser>(&self, ser: Ser) -> Result<Ser::Ok, Ser::Error>
+ where
+ Ser: Serializer,
+ {
+ let mut s = ser.serialize_map(Some(self.len()))?;
+ for (k, v) in self.iter() {
+ s.serialize_entry(k.deref(), v.deref())?;
+ }
+ s.end()
+ }
+}
+
+// HashSet
+
+impl<'de, A: Deserialize<'de> + Hash + Eq + Clone, S: BuildHasher + Default> Deserialize<'de>
+ for HashSet<A, S>
+{
+ fn deserialize<D>(des: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ des.deserialize_seq(SeqVisitor::new())
+ }
+}
+
+impl<A: Serialize + Hash + Eq + Clone, S: BuildHasher + Default> Serialize for HashSet<A, S> {
+ fn serialize<Ser>(&self, ser: Ser) -> Result<Ser::Ok, Ser::Error>
+ where
+ Ser: Serializer,
+ {
+ let mut s = ser.serialize_seq(Some(self.len()))?;
+ for i in self.iter() {
+ s.serialize_element(i.deref())?;
+ }
+ s.end()
+ }
+}
+
+// Vector
+
+impl<'de, A: Clone + Deserialize<'de>> Deserialize<'de> for Vector<A> {
+ fn deserialize<D>(des: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ des.deserialize_seq(SeqVisitor::<'de, Vector<A>, A>::new())
+ }
+}
+
+impl<A: Clone + Serialize> Serialize for Vector<A> {
+ fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut s = ser.serialize_seq(Some(self.len()))?;
+ for i in self.iter() {
+ s.serialize_element(i.deref())?;
+ }
+ s.end()
+ }
+}
+
+// Tests
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::proptest::{hash_map, hash_set, ord_map, ord_set, vector};
+ use ::proptest::num::i32;
+ use ::proptest::proptest;
+ use serde_json::{from_str, to_string};
+
+ proptest! {
+ #[test]
+ fn ser_ordset(ref v in ord_set(i32::ANY, 0..100)) {
+ assert_eq!(v, &from_str::<OrdSet<i32>>(&to_string(&v).unwrap()).unwrap());
+ }
+
+ #[test]
+ fn ser_ordmap(ref v in ord_map(i32::ANY, i32::ANY, 0..100)) {
+ assert_eq!(v, &from_str::<OrdMap<i32, i32>>(&to_string(&v).unwrap()).unwrap());
+ }
+
+ #[test]
+ fn ser_hashmap(ref v in hash_map(i32::ANY, i32::ANY, 0..100)) {
+ assert_eq!(v, &from_str::<HashMap<i32, i32>>(&to_string(&v).unwrap()).unwrap());
+ }
+
+ #[test]
+ fn ser_hashset(ref v in hash_set(i32::ANY, 0..100)) {
+ assert_eq!(v, &from_str::<HashSet<i32>>(&to_string(&v).unwrap()).unwrap());
+ }
+
+ #[test]
+ fn ser_vector(ref v in vector(i32::ANY, 0..100)) {
+ assert_eq!(v, &from_str::<Vector<i32>>(&to_string(&v).unwrap()).unwrap());
+ }
+ }
+}
diff --git a/vendor/im-rc/src/sort.rs b/vendor/im-rc/src/sort.rs
new file mode 100644
index 000000000..6c980019a
--- /dev/null
+++ b/vendor/im-rc/src/sort.rs
@@ -0,0 +1,203 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use crate::vector::FocusMut;
+use rand_core::{RngCore, SeedableRng};
+use std::cmp::Ordering;
+use std::mem;
+
+fn gen_range<R: RngCore>(rng: &mut R, min: usize, max: usize) -> usize {
+ let range = max - min;
+ min + (rng.next_u64() as usize % range)
+}
+
+// Ported from the Java version at:
+// http://www.cs.princeton.edu/~rs/talks/QuicksortIsOptimal.pdf
+// There are a couple of modifications made here to make it more performant on the tree structure of
+// the Vector. Instead of handling equal and non-equal items in a single pass, we make two
+// additional passes to find the exact partition places. This allows us to split the focus into
+// three correctly sized parts for items less than, equal to, and greater than the pivot. As a
+// bonus, this doesn't need to reorder the equal items to the center of the vector.
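+// As a concrete illustration (added for clarity, not part of the upstream comment): given the
+// items [3, 1, 3, 5, 2] with the first 3 picked as the pivot, the counting pass over the
+// remaining [1, 3, 5, 2] finds less_count = 2 and equal_count = 1, and those counts determine
+// how the focus is split into the three zones below.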
+fn do_quicksort<A, F, R>(vector: FocusMut<'_, A>, cmp: &F, rng: &mut R)
+where
+ A: Clone,
+ F: Fn(&A, &A) -> Ordering,
+ R: RngCore,
+{
+ if vector.len() <= 1 {
+ return;
+ }
+
+ // We know there are at least 2 elements here
+ let pivot_index = gen_range(rng, 0, vector.len());
+ let (mut first, mut rest) = vector.split_at(1);
+
+ if pivot_index > 0 {
+ mem::swap(rest.index_mut(pivot_index - 1), first.index_mut(0));
+ }
+ // Pivot is now always in the first slice
+ let pivot_item = first.index(0);
+
+ // Find the exact place to put the pivot or pivot-equal items
+ let mut less_count = 0;
+ let mut equal_count = 0;
+
+ for index in 0..rest.len() {
+ let item = rest.index(index);
+ let comp = cmp(item, pivot_item);
+ match comp {
+ Ordering::Less => less_count += 1,
+ Ordering::Equal => equal_count += 1,
+ Ordering::Greater => {}
+ }
+ }
+
+ // If by accident we picked the minimum element as a pivot, we just call sort again with the
+ // rest of the vector.
+ if less_count == 0 {
+ do_quicksort(rest, cmp, rng);
+ return;
+ }
+
+ // We know here that there is at least one item before the pivot, so we move the minimum to the
+    // beginning part of the vector. First, however, we swap the pivot to the start of the equal
+ // zone.
+ less_count -= 1;
+ equal_count += 1;
+ let first_item = first.index_mut(0);
+ mem::swap(first_item, rest.index_mut(less_count));
+ for index in 0..rest.len() {
+ if index == less_count {
+ // This is the position we swapped the pivot to. We can't move it from its position, and
+            // we know it's not the minimum.
+ continue;
+ }
+ let rest_item = rest.index_mut(index);
+ if cmp(rest_item, first_item) == Ordering::Less {
+ mem::swap(first_item, rest_item);
+ }
+ }
+
+ // Split the vector up into less_than, equal to and greater than parts.
+ let (remaining, mut greater_focus) = rest.split_at(less_count + equal_count);
+ let (mut less_focus, mut equal_focus) = remaining.split_at(less_count);
+
+ let mut less_position = 0;
+ let mut equal_position = 0;
+ let mut greater_position = 0;
+
+ while less_position != less_focus.len() || greater_position != greater_focus.len() {
+        // At the start of this loop, equal_position always points to an equal item
+ let mut equal_swap_side = None;
+ let equal_item = equal_focus.index(equal_position);
+
+ // Advance the less_position until we find an out of place item
+ while less_position != less_focus.len() {
+ let less_item = less_focus.index(less_position);
+ match cmp(less_item, equal_item) {
+ Ordering::Equal => {
+ equal_swap_side = Some(Ordering::Less);
+ break;
+ }
+ Ordering::Greater => {
+ break;
+ }
+ _ => {}
+ }
+ less_position += 1;
+ }
+
+        // Advance the greater_position until we find an out of place item
+ while greater_position != greater_focus.len() {
+ let greater_item = greater_focus.index(greater_position);
+ match cmp(greater_item, equal_item) {
+ Ordering::Less => break,
+ Ordering::Equal => {
+ equal_swap_side = Some(Ordering::Greater);
+ break;
+ }
+ _ => {}
+ }
+ greater_position += 1;
+ }
+
+ if let Some(swap_side) = equal_swap_side {
+            // One of the sides is equal to the pivot, so advance the equal position
+ let item = if swap_side == Ordering::Less {
+ less_focus.index_mut(less_position)
+ } else {
+ greater_focus.index_mut(greater_position)
+ };
+
+ // We are guaranteed not to hit the end of the equal focus
+ while cmp(item, equal_focus.index(equal_position)) == Ordering::Equal {
+ equal_position += 1;
+ }
+
+            // Swap the equal position and the desired side. Note that only the equal focus is
+            // guaranteed to have made progress, so we don't advance the side's index.
+ mem::swap(item, equal_focus.index_mut(equal_position));
+ } else if less_position != less_focus.len() && greater_position != greater_focus.len() {
+            // Both sides are out of place and not equal to the pivot. This can only happen if
+            // there is a greater item in the lesser zone and a lesser item in the greater zone.
+            // The solution is to swap both sides and advance both sides' indices.
+ debug_assert_ne!(
+ cmp(
+ less_focus.index(less_position),
+ equal_focus.index(equal_position)
+ ),
+ Ordering::Equal
+ );
+ debug_assert_ne!(
+ cmp(
+ greater_focus.index(greater_position),
+ equal_focus.index(equal_position)
+ ),
+ Ordering::Equal
+ );
+ mem::swap(
+ less_focus.index_mut(less_position),
+ greater_focus.index_mut(greater_position),
+ );
+ less_position += 1;
+ greater_position += 1;
+ }
+ }
+
+    // Now that both sides are partitioned correctly, we just have to recurse.
+ do_quicksort(less_focus, cmp, rng);
+ if !greater_focus.is_empty() {
+ do_quicksort(greater_focus, cmp, rng);
+ }
+}
+
+pub(crate) fn quicksort<A, F>(vector: FocusMut<'_, A>, cmp: &F)
+where
+ A: Clone,
+ F: Fn(&A, &A) -> Ordering,
+{
+ let mut rng = rand_xoshiro::Xoshiro256Plus::seed_from_u64(0);
+ do_quicksort(vector, cmp, &mut rng);
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::test::is_sorted;
+ use crate::vector::proptest::vector;
+ use ::proptest::num::i32;
+ use ::proptest::proptest;
+
+ proptest! {
+ #[test]
+ fn test_quicksort(ref input in vector(i32::ANY, 0..10000)) {
+ let mut vec = input.clone();
+ let len = vec.len();
+ if len > 1 {
+ quicksort(vec.focus_mut(), &Ord::cmp);
+ }
+ assert!(is_sorted(vec));
+ }
+ }
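+
+    // Added illustrative unit test (not from upstream): a fixed input with many duplicate
+    // values exercises the three-way partition in `do_quicksort`.
+    #[test]
+    fn test_quicksort_duplicates() {
+        let mut vec: crate::Vector<i32> = [3, 1, 3, 5, 2, 3, 0, 5].iter().cloned().collect();
+        quicksort(vec.focus_mut(), &Ord::cmp);
+        assert!(is_sorted(vec));
+    }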
+}
diff --git a/vendor/im-rc/src/sync.rs b/vendor/im-rc/src/sync.rs
new file mode 100644
index 000000000..9b137555e
--- /dev/null
+++ b/vendor/im-rc/src/sync.rs
@@ -0,0 +1,69 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+pub(crate) use self::lock::Lock;
+
+#[cfg(threadsafe)]
+mod lock {
+ use std::sync::{Arc, Mutex, MutexGuard};
+
+ /// Thread safe lock: just wraps a `Mutex`.
+ pub(crate) struct Lock<A> {
+ lock: Arc<Mutex<A>>,
+ }
+
+ impl<A> Lock<A> {
+ pub(crate) fn new(value: A) -> Self {
+ Lock {
+ lock: Arc::new(Mutex::new(value)),
+ }
+ }
+
+ #[inline]
+ pub(crate) fn lock(&mut self) -> Option<MutexGuard<'_, A>> {
+ self.lock.lock().ok()
+ }
+ }
+
+ impl<A> Clone for Lock<A> {
+ fn clone(&self) -> Self {
+ Lock {
+ lock: self.lock.clone(),
+ }
+ }
+ }
+}
+
+#[cfg(not(threadsafe))]
+mod lock {
+ use std::cell::{RefCell, RefMut};
+ use std::rc::Rc;
+
+    /// Single threaded lock: a `RefCell`, so we will safely panic if we somehow
+    /// try to access the stored data twice from the same thread.
+ pub(crate) struct Lock<A> {
+ lock: Rc<RefCell<A>>,
+ }
+
+ impl<A> Lock<A> {
+ pub(crate) fn new(value: A) -> Self {
+ Lock {
+ lock: Rc::new(RefCell::new(value)),
+ }
+ }
+
+ #[inline]
+ pub(crate) fn lock(&mut self) -> Option<RefMut<'_, A>> {
+ self.lock.try_borrow_mut().ok()
+ }
+ }
+
+ impl<A> Clone for Lock<A> {
+ fn clone(&self) -> Self {
+ Lock {
+ lock: self.lock.clone(),
+ }
+ }
+ }
+}
diff --git a/vendor/im-rc/src/test.rs b/vendor/im-rc/src/test.rs
new file mode 100644
index 000000000..9887d0138
--- /dev/null
+++ b/vendor/im-rc/src/test.rs
@@ -0,0 +1,86 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use metrohash::MetroHash64;
+use std::hash::{BuildHasher, Hasher};
+use std::marker::PhantomData;
+use typenum::{Unsigned, U64};
+
+pub(crate) fn is_sorted<A, I>(l: I) -> bool
+where
+ I: IntoIterator<Item = A>,
+ A: Ord,
+{
+ let mut it = l.into_iter().peekable();
+ loop {
+ match (it.next(), it.peek()) {
+ (_, None) => return true,
+ (Some(ref a), Some(b)) if a > b => return false,
+ _ => (),
+ }
+ }
+}
+
+pub(crate) struct LolHasher<N: Unsigned = U64> {
+ state: u64,
+ shift: usize,
+ size: PhantomData<N>,
+}
+
+impl<N: Unsigned> LolHasher<N> {
+ fn feed_me(&mut self, byte: u8) {
+ self.state ^= u64::from(byte) << self.shift;
+ self.shift += 8;
+ if self.shift >= 64 {
+ self.shift = 0;
+ }
+ }
+}
+
+impl<N: Unsigned> Hasher for LolHasher<N> {
+ fn write(&mut self, bytes: &[u8]) {
+ for byte in bytes {
+ self.feed_me(*byte)
+ }
+ }
+
+ fn finish(&self) -> u64 {
+ if N::USIZE == 64 {
+ self.state
+ } else {
+ self.state & ((1 << N::USIZE) - 1)
+ }
+ }
+}
+
+impl<N: Unsigned> Default for LolHasher<N> {
+ fn default() -> Self {
+ LolHasher {
+ state: 0,
+ shift: 0,
+ size: PhantomData,
+ }
+ }
+}
+
+pub(crate) struct MetroHashBuilder {
+ seed: u64,
+}
+
+impl MetroHashBuilder {
+ pub(crate) fn new(seed: u64) -> Self {
+ MetroHashBuilder { seed }
+ }
+
+ pub(crate) fn seed(&self) -> u64 {
+ self.seed
+ }
+}
+
+impl BuildHasher for MetroHashBuilder {
+ type Hasher = MetroHash64;
+ fn build_hasher(&self) -> Self::Hasher {
+ MetroHash64::with_seed(self.seed)
+ }
+}
diff --git a/vendor/im-rc/src/tests/hashset.rs b/vendor/im-rc/src/tests/hashset.rs
new file mode 100644
index 000000000..01df2be4e
--- /dev/null
+++ b/vendor/im-rc/src/tests/hashset.rs
@@ -0,0 +1,85 @@
+#![allow(clippy::unit_arg)]
+
+use std::collections::HashSet as NatSet;
+use std::fmt::{Debug, Error, Formatter, Write};
+use std::hash::Hash;
+
+use crate::HashSet;
+
+use proptest::proptest;
+use proptest_derive::Arbitrary;
+
+#[derive(Arbitrary, Debug)]
+enum Action<A> {
+ Insert(A),
+ Remove(A),
+}
+
+#[derive(Arbitrary)]
+struct Actions<A>(Vec<Action<A>>)
+where
+ A: Hash + Eq + Clone;
+
+impl<A> Debug for Actions<A>
+where
+ A: Hash + Eq + Debug + Clone,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut out = String::new();
+ let mut expected = NatSet::new();
+ writeln!(out, "let mut set = HashSet::new();")?;
+ for action in &self.0 {
+ match action {
+ Action::Insert(ref value) => {
+ expected.insert(value.clone());
+ writeln!(out, "set.insert({:?});", value)?;
+ }
+ Action::Remove(ref value) => {
+ expected.remove(value);
+ writeln!(out, "set.remove({:?});", value)?;
+ }
+ }
+ }
+ writeln!(
+ out,
+ "let expected = vec!{:?};",
+ expected.into_iter().collect::<Vec<_>>()
+ )?;
+ writeln!(out, "assert_eq!(HashSet::from(expected), set);")?;
+ write!(f, "{}", super::code_fmt(&out))
+ }
+}
+
+proptest! {
+ #[test]
+ fn comprehensive(actions: Actions<u8>) {
+ let mut set = HashSet::new();
+ let mut nat = NatSet::new();
+ for action in actions.0 {
+ match action {
+ Action::Insert(value) => {
+ let len = nat.len() + if nat.contains(&value) {
+ 0
+ } else {
+ 1
+ };
+ nat.insert(value);
+ set.insert(value);
+ assert_eq!(len, set.len());
+ }
+ Action::Remove(value) => {
+ let len = nat.len() - if nat.contains(&value) {
+ 1
+ } else {
+ 0
+ };
+ nat.remove(&value);
+ set.remove(&value);
+ assert_eq!(len, set.len());
+ }
+ }
+ assert_eq!(nat.len(), set.len());
+ assert_eq!(HashSet::from(nat.clone()), set);
+ }
+ }
+}
diff --git a/vendor/im-rc/src/tests/mod.rs b/vendor/im-rc/src/tests/mod.rs
new file mode 100644
index 000000000..cafea5f9f
--- /dev/null
+++ b/vendor/im-rc/src/tests/mod.rs
@@ -0,0 +1,24 @@
+mod hashset;
+mod ordset;
+mod vector;
+
+fn code_fmt(code: &str) -> String {
+ // use syntect::easy::HighlightLines;
+ // use syntect::highlighting::{Style, ThemeSet};
+ // use syntect::parsing::SyntaxSet;
+ // use syntect::util::{as_24_bit_terminal_escaped, LinesWithEndings};
+ //
+ // let ps = SyntaxSet::load_defaults_newlines();
+ // let ts = ThemeSet::load_defaults();
+ // let syntax = ps.find_syntax_by_extension("rs").unwrap();
+ // let mut h = HighlightLines::new(syntax, &ts.themes["base16-ocean.dark"]);
+ // let mut out = String::from("\n\n");
+ // for line in LinesWithEndings::from(&code) {
+ // let ranges: Vec<(Style, &str)> = h.highlight(line, &ps);
+ // let escaped = as_24_bit_terminal_escaped(&ranges[..], false);
+ // out += &escaped;
+ // }
+ // out += "\n\x1b[0m";
+ // out
+ code.to_string()
+}
diff --git a/vendor/im-rc/src/tests/ordset.rs b/vendor/im-rc/src/tests/ordset.rs
new file mode 100644
index 000000000..15efec5c2
--- /dev/null
+++ b/vendor/im-rc/src/tests/ordset.rs
@@ -0,0 +1,85 @@
+#![allow(clippy::unit_arg)]
+
+use std::collections::BTreeSet;
+use std::fmt::{Debug, Error, Formatter, Write};
+
+use crate::OrdSet;
+
+use proptest::proptest;
+use proptest_derive::Arbitrary;
+
+#[derive(Arbitrary, Debug)]
+enum Action<A> {
+ Insert(A),
+ Remove(A),
+}
+
+#[derive(Arbitrary)]
+struct Actions<A>(Vec<Action<A>>)
+where
+ A: Ord + Clone;
+
+impl<A> Debug for Actions<A>
+where
+ A: Ord + Debug + Clone,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut out = String::new();
+ let mut expected = BTreeSet::new();
+ writeln!(out, "let mut set = OrdSet::new();")?;
+ for action in &self.0 {
+ match action {
+ Action::Insert(ref value) => {
+ expected.insert(value.clone());
+ writeln!(out, "set.insert({:?});", value)?;
+ }
+ Action::Remove(ref value) => {
+ expected.remove(value);
+ writeln!(out, "set.remove({:?});", value)?;
+ }
+ }
+ }
+ writeln!(
+ out,
+ "let expected = vec!{:?};",
+ expected.into_iter().collect::<Vec<_>>()
+ )?;
+ writeln!(out, "assert_eq!(OrdSet::from(expected), set);")?;
+ write!(f, "{}", super::code_fmt(&out))
+ }
+}
+
+proptest! {
+ #[test]
+ fn comprehensive(actions: Actions<u8>) {
+ let mut set = OrdSet::new();
+ let mut nat = BTreeSet::new();
+ for action in actions.0 {
+ match action {
+ Action::Insert(value) => {
+ let len = nat.len() + if nat.contains(&value) {
+ 0
+ } else {
+ 1
+ };
+ nat.insert(value);
+ set.insert(value);
+ assert_eq!(len, set.len());
+ }
+ Action::Remove(value) => {
+ let len = nat.len() - if nat.contains(&value) {
+ 1
+ } else {
+ 0
+ };
+ nat.remove(&value);
+ set.remove(&value);
+ assert_eq!(len, set.len());
+ }
+ }
+ assert_eq!(nat.len(), set.len());
+ assert_eq!(OrdSet::from(nat.clone()), set);
+ assert!(nat.iter().eq(set.iter()));
+ }
+ }
+}
diff --git a/vendor/im-rc/src/tests/vector.rs b/vendor/im-rc/src/tests/vector.rs
new file mode 100644
index 000000000..14e312ade
--- /dev/null
+++ b/vendor/im-rc/src/tests/vector.rs
@@ -0,0 +1,231 @@
+#![allow(clippy::unit_arg)]
+
+use std::fmt::{Debug, Error, Formatter, Write};
+
+use crate::Vector;
+
+use proptest::proptest;
+use proptest_derive::Arbitrary;
+
+#[derive(Arbitrary, Debug)]
+enum Action<A> {
+ PushFront(A),
+ PushBack(A),
+ PopFront,
+ PopBack,
+ Insert(usize, A),
+ Remove(usize),
+ JoinLeft(Vec<A>),
+ JoinRight(Vec<A>),
+ SplitLeft(usize),
+ SplitRight(usize),
+}
+
+#[derive(Arbitrary)]
+struct Actions<A>(Vec<Action<A>>)
+where
+ A: Clone;
+
+impl<A> Debug for Actions<A>
+where
+ A: Debug + Clone,
+{
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ let mut out = String::new();
+ let mut expected = vec![];
+ writeln!(out, "let mut vec = Vector::new();")?;
+ for action in &self.0 {
+ match action {
+ Action::PushFront(ref value) => {
+ expected.insert(0, value.clone());
+ writeln!(out, "vec.push_front({:?});", value)?
+ }
+ Action::PushBack(ref value) => {
+ expected.push(value.clone());
+ writeln!(out, "vec.push_back({:?});", value)?
+ }
+ Action::PopFront => {
+ if !expected.is_empty() {
+ expected.remove(0);
+ }
+ writeln!(out, "vec.pop_front();")?
+ }
+ Action::PopBack => {
+ expected.pop();
+ writeln!(out, "vec.pop_back();")?
+ }
+ Action::Insert(ref index, ref value) => {
+ let index = cap_index(expected.len(), *index);
+ expected.insert(index, value.clone());
+ writeln!(out, "vec.insert({:?}, {:?});", index, value)?
+ }
+ Action::Remove(ref index) => {
+ if !expected.is_empty() {
+ let index = cap_index(expected.len(), *index);
+ expected.remove(index);
+                        writeln!(out, "vec.remove({:?});", index)?
+ } else {
+ continue;
+ }
+ }
+ Action::JoinLeft(ref vec) => {
+ let mut vec_new = vec.clone();
+ vec_new.append(&mut expected);
+ expected = vec_new;
+ writeln!(
+ out,
+ "let mut vec_new = Vector::from(vec!{:?}); // size {:?}",
+ vec,
+ vec.len()
+ )?;
+ writeln!(out, "vec_new.append(vec);")?;
+ writeln!(out, "vec = vec_new;")?
+ }
+ Action::JoinRight(ref vec) => {
+ expected.append(&mut vec.clone());
+ writeln!(
+ out,
+ "vec.append(Vector::from(vec!{:?})); // size {:?}",
+ vec,
+ vec.len()
+ )?
+ }
+ Action::SplitLeft(ref index) => {
+ let index = cap_index(expected.len(), *index);
+ expected.truncate(index);
+ writeln!(out, "vec.split_off({:?});", index)?
+ }
+ Action::SplitRight(ref index) => {
+ let index = cap_index(expected.len(), *index);
+ expected = expected.split_off(index);
+ writeln!(out, "vec = vec.split_off({:?});", index)?
+ }
+ }
+ writeln!(out, "// len = {:?}", expected.len())?;
+ }
+ writeln!(out, "let expected = vec!{:?};", expected)?;
+ writeln!(out, "assert_eq!(Vector::from(expected), vec);")?;
+ write!(f, "{}", super::code_fmt(&out))
+ }
+}
+
+fn cap_index(len: usize, index: usize) -> usize {
+ if len == 0 {
+ 0
+ } else {
+ index % len
+ }
+}
+
+proptest! {
+ #[test]
+ fn comprehensive(actions: Actions<u8>) {
+ let mut vec = Vector::new();
+ let mut nat = Vec::new();
+ vec.assert_invariants();
+ for action in actions.0 {
+ match action {
+ Action::PushFront(value) => {
+ let len = vec.len();
+ nat.insert(0, value);
+ vec.push_front(value);
+ assert_eq!(len + 1, vec.len());
+ }
+ Action::PushBack(value) => {
+ let len = vec.len();
+ nat.push(value);
+ vec.push_back(value);
+ assert_eq!(len + 1, vec.len());
+ }
+ Action::PopFront => {
+ if vec.is_empty() {
+ assert_eq!(None, vec.pop_front());
+ } else {
+ let len = vec.len();
+ assert_eq!(nat.remove(0), vec.pop_front().unwrap());
+ assert_eq!(len - 1, vec.len());
+ }
+ }
+ Action::PopBack => {
+ if vec.is_empty() {
+ assert_eq!(None, vec.pop_back());
+ } else {
+ let len = vec.len();
+ assert_eq!(nat.pop(), vec.pop_back());
+ assert_eq!(len - 1, vec.len());
+ }
+ }
+ Action::Insert(index, value) => {
+ let index = cap_index(vec.len(), index);
+ let len = vec.len();
+ nat.insert(index, value);
+ vec.insert(index, value);
+ assert_eq!(len + 1, vec.len());
+ }
+ Action::Remove(index) => {
+ if vec.is_empty() {
+ continue;
+ }
+ let index = cap_index(vec.len(), index);
+ let len = vec.len();
+ assert_eq!(nat.remove(index), vec.remove(index));
+ assert_eq!(len - 1, vec.len());
+ }
+ Action::JoinLeft(mut new_nat) => {
+ let mut new_vec = new_nat.iter().cloned().collect::<Vector<_>>();
+ let add_len = new_nat.len();
+ let len = vec.len();
+ new_vec.append(vec);
+ vec = new_vec;
+ new_nat.append(&mut nat);
+ nat = new_nat;
+ assert_eq!(len + add_len, vec.len());
+ }
+ Action::JoinRight(mut new_nat) => {
+ let new_vec = new_nat.iter().cloned().collect::<Vector<_>>();
+ let add_len = new_nat.len();
+ let len = vec.len();
+ vec.append(new_vec);
+ nat.append(&mut new_nat);
+ assert_eq!(len + add_len, vec.len());
+ }
+ Action::SplitLeft(index) => {
+ let index = cap_index(vec.len(), index);
+ let len = vec.len();
+ let vec_right = vec.split_off(index);
+ let nat_right = nat.split_off(index);
+ assert_eq!(index, vec.len());
+ assert_eq!(len - index, vec_right.len());
+ assert_eq!(nat_right.iter().cloned().collect::<Vector<_>>(), vec_right);
+ }
+ Action::SplitRight(index) => {
+ let index = cap_index(vec.len(), index);
+ let len = vec.len();
+ let vec_right = vec.split_off(index);
+ let nat_right = nat.split_off(index);
+ assert_eq!(index, vec.len());
+ assert_eq!(len - index, vec_right.len());
+ assert_eq!(nat.iter().cloned().collect::<Vector<_>>(), vec);
+ vec = vec_right;
+ nat = nat_right;
+ }
+ }
+ vec.assert_invariants();
+            assert_eq!(nat.len(), vec.len());
+ assert_eq!(nat.iter().cloned().collect::<Vector<_>>(), vec);
+ }
+ }
+}
+
+#[test]
+fn test_inserts() {
+ const N: usize = 2000;
+ let mut v = Vector::new();
+ for i in 0..N {
+ v.insert(v.len() / 2, i);
+ }
+ let mut rv: Vec<usize> = Vec::new();
+ rv.extend((0..N).skip(1).step_by(2));
+ rv.extend((0..N).step_by(2).rev());
+ assert_eq!(rv.iter().cloned().collect::<Vector<_>>(), v);
+}
diff --git a/vendor/im-rc/src/util.rs b/vendor/im-rc/src/util.rs
new file mode 100644
index 000000000..5451f156d
--- /dev/null
+++ b/vendor/im-rc/src/util.rs
@@ -0,0 +1,142 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// Every codebase needs a `util` module.
+
+use std::cmp::Ordering;
+use std::ops::{Bound, IndexMut, Range, RangeBounds};
+use std::ptr;
+
+#[cfg(feature = "pool")]
+pub(crate) use refpool::{PoolClone, PoolDefault};
+
+// The `Ref` type is an alias for either `Rc` or `Arc`, user's choice.
+
+// `Arc` without refpool
+#[cfg(all(threadsafe))]
+pub(crate) use crate::fakepool::{Arc as PoolRef, Pool, PoolClone, PoolDefault};
+
+// `Ref` == `Arc` when threadsafe
+#[cfg(threadsafe)]
+pub(crate) type Ref<A> = std::sync::Arc<A>;
+
+// `Rc` without refpool
+#[cfg(all(not(threadsafe), not(feature = "pool")))]
+pub(crate) use crate::fakepool::{Pool, PoolClone, PoolDefault, Rc as PoolRef};
+
+// `Rc` with refpool
+#[cfg(all(not(threadsafe), feature = "pool"))]
+pub(crate) type PoolRef<A> = refpool::PoolRef<A>;
+#[cfg(all(not(threadsafe), feature = "pool"))]
+pub(crate) type Pool<A> = refpool::Pool<A>;
+
+// `Ref` == `Rc` when not threadsafe
+#[cfg(not(threadsafe))]
+pub(crate) type Ref<A> = std::rc::Rc<A>;
+
+pub(crate) fn clone_ref<A>(r: Ref<A>) -> A
+where
+ A: Clone,
+{
+ Ref::try_unwrap(r).unwrap_or_else(|r| (*r).clone())
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+pub(crate) enum Side {
+ Left,
+ Right,
+}
+
+/// Swap two values of anything implementing `IndexMut`.
+///
+/// Like `slice::swap`, but more generic.
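+///
+/// For example (illustrative, not from the upstream docs): given
+/// `let mut v = vec![1, 2, 3]`, calling `swap_indices(&mut v, 0, 2)` leaves
+/// `v` as `[3, 2, 1]`.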
+#[allow(unsafe_code)]
+pub(crate) fn swap_indices<V>(vector: &mut V, a: usize, b: usize)
+where
+ V: IndexMut<usize>,
+ V::Output: Sized,
+{
+ if a == b {
+ return;
+ }
+ // so sorry, but there's no implementation for this in std that's
+ // sufficiently generic
+ let pa: *mut V::Output = &mut vector[a];
+ let pb: *mut V::Output = &mut vector[b];
+ unsafe {
+ ptr::swap(pa, pb);
+ }
+}
+
+#[allow(dead_code)]
+pub(crate) fn linear_search_by<'a, A, I, F>(iterable: I, mut cmp: F) -> Result<usize, usize>
+where
+ A: 'a,
+ I: IntoIterator<Item = &'a A>,
+ F: FnMut(&A) -> Ordering,
+{
+ let mut pos = 0;
+ for value in iterable {
+ match cmp(value) {
+ Ordering::Equal => return Ok(pos),
+ Ordering::Greater => return Err(pos),
+ Ordering::Less => {}
+ }
+ pos += 1;
+ }
+ Err(pos)
+}
+
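+/// Convert any `RangeBounds` over `usize` into a concrete `Range`, using
+/// `right_unbounded` as the end when the upper bound is open.
+///
+/// For example (illustrative, not from the upstream docs):
+/// `to_range(&(2..=5), 10)` yields `2..6`, and `to_range(&(..), 10)` yields `0..10`.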
+pub(crate) fn to_range<R>(range: &R, right_unbounded: usize) -> Range<usize>
+where
+ R: RangeBounds<usize>,
+{
+ let start_index = match range.start_bound() {
+ Bound::Included(i) => *i,
+ Bound::Excluded(i) => *i + 1,
+ Bound::Unbounded => 0,
+ };
+ let end_index = match range.end_bound() {
+ Bound::Included(i) => *i + 1,
+ Bound::Excluded(i) => *i,
+ Bound::Unbounded => right_unbounded,
+ };
+ start_index..end_index
+}
+
+macro_rules! def_pool {
+ ($name:ident<$($arg:tt),*>, $pooltype:ty) => {
+ /// A memory pool for the appropriate node type.
+ pub struct $name<$($arg,)*>(Pool<$pooltype>);
+
+ impl<$($arg,)*> $name<$($arg,)*> {
+ /// Create a new pool with the given size.
+ pub fn new(size: usize) -> Self {
+ Self(Pool::new(size))
+ }
+
+ /// Fill the pool with preallocated chunks.
+ pub fn fill(&self) {
+ self.0.fill();
+ }
+
+            /// Get the current size of the pool.
+ pub fn pool_size(&self) -> usize {
+ self.0.get_pool_size()
+ }
+ }
+
+ impl<$($arg,)*> Default for $name<$($arg,)*> {
+ fn default() -> Self {
+ Self::new($crate::config::POOL_SIZE)
+ }
+ }
+
+ impl<$($arg,)*> Clone for $name<$($arg,)*> {
+ fn clone(&self) -> Self {
+ Self(self.0.clone())
+ }
+ }
+ };
+}
diff --git a/vendor/im-rc/src/vector/focus.rs b/vendor/im-rc/src/vector/focus.rs
new file mode 100644
index 000000000..4fdba751a
--- /dev/null
+++ b/vendor/im-rc/src/vector/focus.rs
@@ -0,0 +1,909 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use std::mem::{replace, swap};
+use std::ops::{Range, RangeBounds};
+use std::ptr::null;
+use std::sync::atomic::{AtomicPtr, Ordering};
+
+use crate::nodes::chunk::Chunk;
+use crate::sync::Lock;
+use crate::util::{to_range, PoolRef, Ref};
+use crate::vector::{
+ Iter, IterMut, RRBPool, Rrb, Vector,
+ VectorInner::{Full, Inline, Single},
+};
+
+/// Focused indexing over a [`Vector`][Vector].
+///
+/// By remembering the last tree node accessed through an index lookup and the
+/// path we took to get there, we can speed up lookups for adjacent indices
+/// tremendously. Lookups on indices in the same node are instantaneous, and
+/// lookups on sibling nodes are also very fast.
+///
+/// A `Focus` can also be used as a restricted view into a vector, using the
+/// [`narrow`][narrow] and [`split_at`][split_at] methods.
+///
+/// # When should I use a `Focus` for better performance?
+///
+/// `Focus` is useful when you need to perform a large number of index lookups
+/// that are more likely than not to be close to each other. It's usually worth
+/// using a `Focus` in any situation where you're batching a lot of index
+/// lookups together, even if they're not obviously adjacent - there's likely
+/// to be some performance gain for even completely random access.
+///
+/// If you're just iterating forwards or backwards over the [`Vector`][Vector]
+/// in order, you're better off with a regular iterator, which, in fact, is
+/// implemented using a `Focus`, but provides a simpler interface.
+///
+/// If you're just doing a very small number of index lookups, the setup cost
+/// for the `Focus` is probably not worth it.
+///
+/// A `Focus` is never faster than an index lookup on a small [`Vector`][Vector]
+/// with a length below the internal RRB tree's branching factor of 64.
+///
+/// # Examples
+///
+/// This example is contrived, as the better way to iterate forwards or
+/// backwards over a vector is with an actual iterator. Even so, the version
+/// using a `Focus` should run nearly an order of magnitude faster than the
+/// version using index lookups at a length of 1000. It should also be noted
+/// that [`vector::Iter`][Iter] is actually implemented using a `Focus` behind
+/// the scenes, so the performance of the two should be identical.
+///
+/// ```rust
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::vector::Vector;
+/// # use std::iter::FromIterator;
+/// let mut vec: Vector<i64> = Vector::from_iter(0..1000);
+///
+/// // Summing a vector, the slow way:
+/// let mut sum = 0;
+/// for i in 0..1000 {
+/// sum += *vec.get(i).unwrap();
+/// }
+/// assert_eq!(499500, sum);
+///
+/// // Summing a vector faster using a Focus:
+/// let mut sum = 0;
+/// let mut focus = vec.focus();
+/// for i in 0..1000 {
+/// sum += *focus.get(i).unwrap();
+/// }
+/// assert_eq!(499500, sum);
+///
+/// // And the easy way, for completeness:
+/// let sum: i64 = vec.iter().sum();
+/// assert_eq!(499500, sum);
+/// ```
+///
+/// [Vector]: enum.Vector.html
+/// [Iter]: struct.Iter.html
+/// [narrow]: #method.narrow
+/// [split_at]: #method.split_at
+pub enum Focus<'a, A> {
+ #[doc(hidden)]
+ Single(&'a [A]),
+ #[doc(hidden)]
+ Full(TreeFocus<A>),
+}
+
+impl<'a, A> Focus<'a, A>
+where
+ A: Clone + 'a,
+{
+ /// Construct a `Focus` for a [`Vector`][Vector].
+ ///
+ /// [Vector]: enum.Vector.html
+ pub fn new(vector: &'a Vector<A>) -> Self {
+ match &vector.vector {
+ Inline(_, chunk) => Focus::Single(chunk),
+ Single(_, chunk) => Focus::Single(chunk),
+ Full(_, tree) => Focus::Full(TreeFocus::new(tree)),
+ }
+ }
+
+ /// Get the length of the focused [`Vector`][Vector].
+ ///
+ /// [Vector]: enum.Vector.html
+ pub fn len(&self) -> usize {
+ match self {
+ Focus::Single(chunk) => chunk.len(),
+ Focus::Full(tree) => tree.len(),
+ }
+ }
+
+ /// Test if the focused [`Vector`][Vector] is empty.
+ ///
+ /// [Vector]: enum.Vector.html
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Get a reference to the value at a given index.
+ pub fn get(&mut self, index: usize) -> Option<&A> {
+ match self {
+ Focus::Single(chunk) => chunk.get(index),
+ Focus::Full(tree) => tree.get(index),
+ }
+ }
+
+ /// Get a reference to the value at a given index.
+ ///
+ /// Panics if the index is out of bounds.
+ pub fn index(&mut self, index: usize) -> &A {
+ self.get(index).expect("index out of bounds")
+ }
+
+ /// Get the chunk for the given index.
+ ///
+ /// This gives you a reference to the leaf node that contains the index,
+ /// along with its start and end indices.
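+    ///
+    /// # Examples
+    ///
+    /// An illustrative example (not from the upstream docs; the exact chunk
+    /// boundaries are an implementation detail):
+    ///
+    /// ```rust
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::vector::Vector;
+    /// # use std::iter::FromIterator;
+    /// let vec = Vector::from_iter(0..1000);
+    /// let mut focus = vec.focus();
+    /// let (range, chunk) = focus.chunk_at(123);
+    /// assert!(range.contains(&123));
+    /// assert_eq!(range.len(), chunk.len());
+    /// ```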
+ pub fn chunk_at(&mut self, index: usize) -> (Range<usize>, &[A]) {
+ let len = self.len();
+ if index >= len {
+ panic!("vector::Focus::chunk_at: index out of bounds");
+ }
+ match self {
+ Focus::Single(chunk) => (0..len, chunk),
+ Focus::Full(tree) => tree.get_chunk(index),
+ }
+ }
+
+ /// Narrow the focus onto a subslice of the vector.
+ ///
+ /// `Focus::narrow(range)` has the same effect as `&slice[range]`, without
+ /// actually modifying the underlying vector.
+ ///
+ /// Panics if the range isn't fully inside the current focus.
+ ///
+ /// ## Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let vec = Vector::from_iter(0..1000);
+ /// let narrowed = vec.focus().narrow(100..200);
+ /// let narrowed_vec = narrowed.into_iter().cloned().collect();
+ /// assert_eq!(Vector::from_iter(100..200), narrowed_vec);
+ /// ```
+ ///
+ /// [slice::split_at]: https://doc.rust-lang.org/std/primitive.slice.html#method.split_at
+ /// [Vector::split_at]: enum.Vector.html#method.split_at
+ pub fn narrow<R>(self, range: R) -> Self
+ where
+ R: RangeBounds<usize>,
+ {
+ let r = to_range(&range, self.len());
+ if r.start >= r.end || r.start >= self.len() {
+ panic!("vector::Focus::narrow: range out of bounds");
+ }
+ match self {
+ Focus::Single(chunk) => Focus::Single(&chunk[r]),
+ Focus::Full(tree) => Focus::Full(tree.narrow(r)),
+ }
+ }
+
+ /// Split the focus into two.
+ ///
+ /// Given an index `index`, consume the focus and produce two new foci, the
+ /// left onto indices `0..index`, and the right onto indices `index..N`
+ /// where `N` is the length of the current focus.
+ ///
+ /// Panics if the index is out of bounds.
+ ///
+ /// This is the moral equivalent of [`slice::split_at`][slice::split_at], in
+ /// that it leaves the underlying data structure unchanged, unlike
+ /// [`Vector::split_at`][Vector::split_at].
+ ///
+ /// ## Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let vec = Vector::from_iter(0..1000);
+ /// let (left, right) = vec.focus().split_at(500);
+ /// let left_vec = left.into_iter().cloned().collect();
+ /// let right_vec = right.into_iter().cloned().collect();
+ /// assert_eq!(Vector::from_iter(0..500), left_vec);
+ /// assert_eq!(Vector::from_iter(500..1000), right_vec);
+ /// ```
+ ///
+ /// [slice::split_at]: https://doc.rust-lang.org/std/primitive.slice.html#method.split_at
+ /// [Vector::split_at]: enum.Vector.html#method.split_at
+ pub fn split_at(self, index: usize) -> (Self, Self) {
+ if index >= self.len() {
+ panic!("vector::Focus::split_at: index out of bounds");
+ }
+ match self {
+ Focus::Single(chunk) => {
+ let (left, right) = chunk.split_at(index);
+ (Focus::Single(left), Focus::Single(right))
+ }
+ Focus::Full(tree) => {
+ let (left, right) = tree.split_at(index);
+ (Focus::Full(left), Focus::Full(right))
+ }
+ }
+ }
+}
+
+impl<'a, A> IntoIterator for Focus<'a, A>
+where
+ A: Clone + 'a,
+{
+ type Item = &'a A;
+ type IntoIter = Iter<'a, A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ Iter::from_focus(self)
+ }
+}
+
+impl<'a, A> Clone for Focus<'a, A>
+where
+ A: Clone + 'a,
+{
+ fn clone(&self) -> Self {
+ match self {
+ Focus::Single(chunk) => Focus::Single(chunk),
+ Focus::Full(tree) => Focus::Full(tree.clone()),
+ }
+ }
+}
+
+pub struct TreeFocus<A> {
+ tree: Rrb<A>,
+ view: Range<usize>,
+ middle_range: Range<usize>,
+ target_range: Range<usize>,
+ target_ptr: *const Chunk<A>,
+}
+
+impl<A> Clone for TreeFocus<A> {
+ fn clone(&self) -> Self {
+ let tree = self.tree.clone();
+ TreeFocus {
+ view: self.view.clone(),
+ middle_range: self.middle_range.clone(),
+ target_range: 0..0,
+ target_ptr: null(),
+ tree,
+ }
+ }
+}
+
+#[allow(unsafe_code)]
+#[cfg(threadsafe)]
+unsafe impl<A: Send> Send for TreeFocus<A> {}
+#[allow(unsafe_code)]
+#[cfg(threadsafe)]
+unsafe impl<A: Sync> Sync for TreeFocus<A> {}
+
+#[inline]
+fn contains<A: Ord>(range: &Range<A>, index: &A) -> bool {
+ *index >= range.start && *index < range.end
+}
+
+impl<A> TreeFocus<A>
+where
+ A: Clone,
+{
+ fn new(tree: &Rrb<A>) -> Self {
+ let middle_start = tree.outer_f.len() + tree.inner_f.len();
+ let middle_end = middle_start + tree.middle.len();
+ TreeFocus {
+ tree: tree.clone(),
+ view: 0..tree.length,
+ middle_range: middle_start..middle_end,
+ target_range: 0..0,
+ target_ptr: null(),
+ }
+ }
+
+ fn len(&self) -> usize {
+ self.view.end - self.view.start
+ }
+
+ fn narrow(self, mut view: Range<usize>) -> Self {
+ view.start += self.view.start;
+ view.end += self.view.start;
+ TreeFocus {
+ view,
+ middle_range: self.middle_range.clone(),
+ target_range: 0..0,
+ target_ptr: null(),
+ tree: self.tree,
+ }
+ }
+
+ fn split_at(self, index: usize) -> (Self, Self) {
+ let len = self.len();
+ let left = self.clone().narrow(0..index);
+ let right = self.narrow(index..len);
+ (left, right)
+ }
+
+ fn physical_index(&self, index: usize) -> usize {
+ debug_assert!(index < self.view.end);
+ self.view.start + index
+ }
+
+ fn logical_range(&self, range: &Range<usize>) -> Range<usize> {
+ (range.start - self.view.start)..(range.end - self.view.start)
+ }
+
+ fn set_focus(&mut self, index: usize) {
+ if index < self.middle_range.start {
+ let outer_len = self.tree.outer_f.len();
+ if index < outer_len {
+ self.target_range = 0..outer_len;
+ self.target_ptr = &*self.tree.outer_f;
+ } else {
+ self.target_range = outer_len..self.middle_range.start;
+ self.target_ptr = &*self.tree.inner_f;
+ }
+ } else if index >= self.middle_range.end {
+ let outer_start = self.middle_range.end + self.tree.inner_b.len();
+ if index < outer_start {
+ self.target_range = self.middle_range.end..outer_start;
+ self.target_ptr = &*self.tree.inner_b;
+ } else {
+ self.target_range = outer_start..self.tree.length;
+ self.target_ptr = &*self.tree.outer_b;
+ }
+ } else {
+ let tree_index = index - self.middle_range.start;
+ let (range, ptr) = self
+ .tree
+ .middle
+ .lookup_chunk(self.tree.middle_level, 0, tree_index);
+ self.target_range =
+ (range.start + self.middle_range.start)..(range.end + self.middle_range.start);
+ self.target_ptr = ptr;
+ }
+ }
+
+ #[allow(unsafe_code)]
+ fn get_focus(&self) -> &Chunk<A> {
+ unsafe { &*self.target_ptr }
+ }
+
+ pub fn get(&mut self, index: usize) -> Option<&A> {
+ if index >= self.len() {
+ return None;
+ }
+ let phys_index = self.physical_index(index);
+ if !contains(&self.target_range, &phys_index) {
+ self.set_focus(phys_index);
+ }
+ let target_phys_index = phys_index - self.target_range.start;
+ Some(&self.get_focus()[target_phys_index])
+ }
+
+ pub fn get_chunk(&mut self, index: usize) -> (Range<usize>, &[A]) {
+ let phys_index = self.physical_index(index);
+ if !contains(&self.target_range, &phys_index) {
+ self.set_focus(phys_index);
+ }
+ let mut slice: &[A] = self.get_focus().as_slice();
+ let mut left = 0;
+ let mut right = 0;
+ if self.target_range.start < self.view.start {
+ left = self.view.start - self.target_range.start;
+ }
+ if self.target_range.end > self.view.end {
+ right = self.target_range.end - self.view.end;
+ }
+ slice = &slice[left..(slice.len() - right)];
+ let phys_range = (self.target_range.start + left)..(self.target_range.end - right);
+ (self.logical_range(&phys_range), slice)
+ }
+}
+
+/// A mutable version of [`Focus`][Focus].
+///
+/// See [`Focus`][Focus] for more details.
+///
+/// You can only build one `FocusMut` at a time for a vector, effectively
+/// keeping a lock on the vector until you're done with the focus, which relies
+/// on the structure of the vector not changing while it exists.
+///
+/// ```rust,compile_fail
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::vector::Vector;
+/// # use std::iter::FromIterator;
+/// let mut vec = Vector::from_iter(0..1000);
+/// let focus1 = vec.focus_mut();
+/// // Fails here in 2015 edition because you're creating
+/// // two mutable references to the same thing.
+/// let focus2 = vec.focus_mut();
+/// // Fails here in 2018 edition because creating focus2
+/// // made focus1's lifetime go out of scope.
+/// assert_eq!(Some(&0), focus1.get(0));
+/// ```
+///
+/// On the other hand, you can split that one focus into multiple sub-focuses,
+/// which is safe because they can't overlap:
+///
+/// ```rust
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::vector::Vector;
+/// # use std::iter::FromIterator;
+/// let mut vec = Vector::from_iter(0..1000);
+/// let focus = vec.focus_mut();
+/// let (mut left, mut right) = focus.split_at(500);
+/// assert_eq!(Some(&0), left.get(0));
+/// assert_eq!(Some(&500), right.get(0));
+/// ```
+///
+/// These sub-foci also work as a lock on the vector, even if the focus they
+/// were created from goes out of scope.
+///
+/// ```rust,compile_fail
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::vector::Vector;
+/// # use std::iter::FromIterator;
+/// let mut vec = Vector::from_iter(0..1000);
+/// let (left, right) = {
+/// let focus = vec.focus_mut();
+/// focus.split_at(500)
+/// };
+/// // `left` and `right` are still in scope even if `focus` isn't, so we can't
+/// // create another focus:
+/// let focus2 = vec.focus_mut();
+/// assert_eq!(Some(&0), left.get(0));
+/// ```
+///
+/// [Focus]: enum.Focus.html
+pub enum FocusMut<'a, A> {
+ #[doc(hidden)]
+ Single(RRBPool<A>, &'a mut [A]),
+ #[doc(hidden)]
+ Full(RRBPool<A>, TreeFocusMut<'a, A>),
+}
+
+impl<'a, A> FocusMut<'a, A>
+where
+ A: Clone + 'a,
+{
+ /// Construct a `FocusMut` for a `Vector`.
+ pub fn new(vector: &'a mut Vector<A>) -> Self {
+ match &mut vector.vector {
+ Inline(pool, chunk) => FocusMut::Single(pool.clone(), chunk),
+ Single(pool, chunk) => FocusMut::Single(
+ pool.clone(),
+ PoolRef::make_mut(&pool.value_pool, chunk).as_mut_slice(),
+ ),
+ Full(pool, tree) => FocusMut::Full(pool.clone(), TreeFocusMut::new(tree)),
+ }
+ }
+
+ /// Get the length of the focused `Vector`.
+ pub fn len(&self) -> usize {
+ match self {
+ FocusMut::Single(_, chunk) => chunk.len(),
+ FocusMut::Full(_, tree) => tree.len(),
+ }
+ }
+
+ /// Test if the focused `Vector` is empty.
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Get a reference to the value at a given index.
+ pub fn get(&mut self, index: usize) -> Option<&A> {
+ self.get_mut(index).map(|r| &*r)
+ }
+
+ /// Get a mutable reference to the value at a given index.
+ pub fn get_mut(&mut self, index: usize) -> Option<&mut A> {
+ match self {
+ FocusMut::Single(_, chunk) => chunk.get_mut(index),
+ FocusMut::Full(pool, tree) => tree.get(pool, index),
+ }
+ }
+
+ /// Get a reference to the value at a given index.
+ ///
+ /// Panics if the index is out of bounds.
+ pub fn index(&mut self, index: usize) -> &A {
+ &*self.index_mut(index)
+ }
+
+ /// Get a mutable reference to the value at a given index.
+ ///
+ /// Panics if the index is out of bounds.
+ #[allow(clippy::should_implement_trait)] // would if I could
+ pub fn index_mut(&mut self, index: usize) -> &mut A {
+ self.get_mut(index).expect("index out of bounds")
+ }
+
+ /// Update the value at a given index.
+ ///
+ /// Returns `None` if the index is out of bounds, or the replaced value
+ /// otherwise.
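+    ///
+    /// # Examples
+    ///
+    /// An illustrative example (not from the upstream docs):
+    ///
+    /// ```rust
+    /// # #[macro_use] extern crate im_rc as im;
+    /// # use im::vector::Vector;
+    /// let mut vec = vector![1, 2, 3];
+    /// {
+    ///     let mut focus = vec.focus_mut();
+    ///     assert_eq!(Some(2), focus.set(1, 20));
+    /// }
+    /// assert_eq!(vector![1, 20, 3], vec);
+    /// ```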
+ pub fn set(&mut self, index: usize, value: A) -> Option<A> {
+ self.get_mut(index).map(|pos| replace(pos, value))
+ }
+
+ /// Swap the values at two given indices.
+ ///
+ /// Panics if either index is out of bounds.
+ ///
+ /// If the indices are equal, this function returns without doing anything.
+ pub fn swap(&mut self, a: usize, b: usize) {
+ if a == b {
+ return;
+ }
+ self.pair(a, b, |left, right| swap(left, right));
+ }
+
+ /// Lookup two indices simultaneously and run a function over them.
+ ///
+ /// Useful because the borrow checker won't let you have more than one
+ /// mutable reference into the same data structure at any given time.
+ ///
+ /// Panics if either index is out of bounds, or if they are the same index.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let mut vec = vector![1, 2, 3, 4, 5];
+ /// vec.focus_mut().pair(1, 3, |a, b| *a += *b);
+ /// assert_eq!(vector![1, 6, 3, 4, 5], vec);
+ /// ```
+ #[allow(unsafe_code)]
+ pub fn pair<F, B>(&mut self, a: usize, b: usize, mut f: F) -> B
+ where
+ F: FnMut(&mut A, &mut A) -> B,
+ {
+ if a == b {
+ panic!("vector::FocusMut::pair: indices cannot be equal!");
+ }
+ let pa: *mut A = self.index_mut(a);
+ let pb: *mut A = self.index_mut(b);
+ unsafe { f(&mut *pa, &mut *pb) }
+ }
+
+ /// Lookup three indices simultaneously and run a function over them.
+ ///
+ /// Useful because the borrow checker won't let you have more than one
+ /// mutable reference into the same data structure at any given time.
+ ///
+ /// Panics if any index is out of bounds, or if any indices are equal.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let mut vec = vector![1, 2, 3, 4, 5];
+ /// vec.focus_mut().triplet(0, 2, 4, |a, b, c| *a += *b + *c);
+ /// assert_eq!(vector![9, 2, 3, 4, 5], vec);
+ /// ```
+ #[allow(unsafe_code)]
+ pub fn triplet<F, B>(&mut self, a: usize, b: usize, c: usize, mut f: F) -> B
+ where
+ F: FnMut(&mut A, &mut A, &mut A) -> B,
+ {
+ if a == b || b == c || a == c {
+ panic!("vector::FocusMut::triplet: indices cannot be equal!");
+ }
+ let pa: *mut A = self.index_mut(a);
+ let pb: *mut A = self.index_mut(b);
+ let pc: *mut A = self.index_mut(c);
+ unsafe { f(&mut *pa, &mut *pb, &mut *pc) }
+ }
+
+ /// Get the chunk for the given index.
+ ///
+ /// This gives you a reference to the leaf node that contains the index,
+ /// along with its start and end indices.
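+ ///
+ /// # Examples
+ ///
+ /// A small sketch of what you can rely on: the returned range covers the
+ /// index you asked for, and the slice is exactly as long as the range.
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let mut vec = Vector::from_iter(0..1000);
+ /// let mut focus = vec.focus_mut();
+ /// let (range, chunk) = focus.chunk_at(123);
+ /// assert!(range.contains(&123));
+ /// assert_eq!(range.end - range.start, chunk.len());
+ /// ```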
+ pub fn chunk_at(&mut self, index: usize) -> (Range<usize>, &mut [A]) {
+ let len = self.len();
+ if index >= len {
+ panic!("vector::FocusMut::chunk_at: index out of bounds");
+ }
+ match self {
+ FocusMut::Single(_, chunk) => (0..len, chunk),
+ FocusMut::Full(pool, tree) => {
+ let (range, chunk) = tree.get_chunk(pool, index);
+ (range, chunk)
+ }
+ }
+ }
+
+ /// Narrow the focus onto a subslice of the vector.
+ ///
+ /// `FocusMut::narrow(range)` has the same effect as `&slice[range]`, without
+ /// actually modifying the underlying vector.
+ ///
+ /// Panics if the range isn't fully inside the current focus.
+ ///
+ /// ## Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let mut vec = Vector::from_iter(0..1000);
+ /// let narrowed = vec.focus_mut().narrow(100..200);
+ /// let narrowed_vec = narrowed.unmut().into_iter().cloned().collect();
+ /// assert_eq!(Vector::from_iter(100..200), narrowed_vec);
+ /// ```
+ ///
+ /// [slice::split_at]: https://doc.rust-lang.org/std/primitive.slice.html#method.split_at
+ /// [Vector::split_at]: enum.Vector.html#method.split_at
+ pub fn narrow<R>(self, range: R) -> Self
+ where
+ R: RangeBounds<usize>,
+ {
+ let r = to_range(&range, self.len());
+ if r.start > r.end || r.start > self.len() {
+ panic!("vector::FocusMut::narrow: range out of bounds");
+ }
+ match self {
+ FocusMut::Single(pool, chunk) => FocusMut::Single(pool, &mut chunk[r]),
+ FocusMut::Full(pool, tree) => FocusMut::Full(pool, tree.narrow(r)),
+ }
+ }
+
+ /// Split the focus into two.
+ ///
+ /// Given an index `index`, consume the focus and produce two new foci, the
+ /// left onto indices `0..index`, and the right onto indices `index..N`
+ /// where `N` is the length of the current focus.
+ ///
+ /// Panics if the index is out of bounds.
+ ///
+ /// This is the moral equivalent of [`slice::split_at`][slice::split_at], in
+ /// that it leaves the underlying data structure unchanged, unlike
+ /// [`Vector::split_at`][Vector::split_at].
+ ///
+ /// ## Examples
+ ///
+ /// ```rust
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// # use std::iter::FromIterator;
+ /// let mut vec = Vector::from_iter(0..1000);
+ /// {
+ /// let (left, right) = vec.focus_mut().split_at(500);
+ /// for ptr in left {
+ /// *ptr += 100;
+ /// }
+ /// for ptr in right {
+ /// *ptr -= 100;
+ /// }
+ /// }
+ /// let expected = Vector::from_iter(100..600)
+ /// + Vector::from_iter(400..900);
+ /// assert_eq!(expected, vec);
+ /// ```
+ ///
+ /// [slice::split_at]: https://doc.rust-lang.org/std/primitive.slice.html#method.split_at
+ /// [Vector::split_at]: enum.Vector.html#method.split_at
+ #[allow(clippy::redundant_clone)]
+ pub fn split_at(self, index: usize) -> (Self, Self) {
+ if index > self.len() {
+ panic!("vector::FocusMut::split_at: index out of bounds");
+ }
+ match self {
+ FocusMut::Single(pool, chunk) => {
+ let (left, right) = chunk.split_at_mut(index);
+ (
+ FocusMut::Single(pool.clone(), left),
+ FocusMut::Single(pool, right),
+ )
+ }
+ FocusMut::Full(pool, tree) => {
+ let (left, right) = tree.split_at(index);
+ (
+ FocusMut::Full(pool.clone(), left),
+ FocusMut::Full(pool, right),
+ )
+ }
+ }
+ }
+
+ /// Convert a `FocusMut` into a `Focus`.
+ pub fn unmut(self) -> Focus<'a, A> {
+ match self {
+ FocusMut::Single(_, chunk) => Focus::Single(chunk),
+ FocusMut::Full(_, mut tree) => Focus::Full(TreeFocus {
+ tree: {
+ let t = tree.tree.lock().unwrap();
+ (*t).clone()
+ },
+ view: tree.view.clone(),
+ middle_range: tree.middle_range.clone(),
+ target_range: 0..0,
+ target_ptr: null(),
+ }),
+ }
+ }
+}
+
+impl<'a, A> IntoIterator for FocusMut<'a, A>
+where
+ A: Clone + 'a,
+{
+ type Item = &'a mut A;
+ type IntoIter = IterMut<'a, A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ IterMut::from_focus(self)
+ }
+}
+
+impl<'a, A> From<FocusMut<'a, A>> for Focus<'a, A>
+where
+ A: Clone + 'a,
+{
+ fn from(f: FocusMut<'a, A>) -> Self {
+ f.unmut()
+ }
+}
+
+pub struct TreeFocusMut<'a, A> {
+ tree: Lock<&'a mut Rrb<A>>,
+ view: Range<usize>,
+ middle_range: Range<usize>,
+ target_range: Range<usize>,
+ target_ptr: AtomicPtr<Chunk<A>>,
+}
+
+impl<'a, A> TreeFocusMut<'a, A>
+where
+ A: Clone + 'a,
+{
+ fn new(tree: &'a mut Rrb<A>) -> Self {
+ let middle_start = tree.outer_f.len() + tree.inner_f.len();
+ let middle_end = middle_start + tree.middle.len();
+ TreeFocusMut {
+ view: 0..tree.length,
+ tree: Lock::new(tree),
+ middle_range: middle_start..middle_end,
+ target_range: 0..0,
+ target_ptr: AtomicPtr::default(),
+ }
+ }
+
+ fn len(&self) -> usize {
+ self.view.end - self.view.start
+ }
+
+ fn narrow(self, mut view: Range<usize>) -> Self {
+ view.start += self.view.start;
+ view.end += self.view.start;
+ TreeFocusMut {
+ view,
+ middle_range: self.middle_range.clone(),
+ target_range: 0..0,
+ target_ptr: AtomicPtr::default(),
+ tree: self.tree,
+ }
+ }
+
+ fn split_at(self, index: usize) -> (Self, Self) {
+ let len = self.len();
+ debug_assert!(index <= len);
+ #[allow(unsafe_code)]
+ let left = TreeFocusMut {
+ view: self.view.start..(self.view.start + index),
+ middle_range: self.middle_range.clone(),
+ target_range: 0..0,
+ target_ptr: AtomicPtr::default(),
+ tree: self.tree.clone(),
+ };
+ let right = TreeFocusMut {
+ view: (self.view.start + index)..(self.view.start + len),
+ middle_range: self.middle_range.clone(),
+ target_range: 0..0,
+ target_ptr: AtomicPtr::default(),
+ tree: self.tree,
+ };
+ (left, right)
+ }
+
+ fn physical_index(&self, index: usize) -> usize {
+ debug_assert!(index < self.view.end);
+ self.view.start + index
+ }
+
+ fn logical_range(&self, range: &Range<usize>) -> Range<usize> {
+ (range.start - self.view.start)..(range.end - self.view.start)
+ }
+
+ fn set_focus(&mut self, pool: &RRBPool<A>, index: usize) {
+ let mut tree = self
+ .tree
+ .lock()
+ .expect("im::vector::Focus::set_focus: unable to acquire exclusive lock on Vector");
+ if index < self.middle_range.start {
+ let outer_len = tree.outer_f.len();
+ if index < outer_len {
+ self.target_range = 0..outer_len;
+ self.target_ptr.store(
+ PoolRef::make_mut(&pool.value_pool, &mut tree.outer_f),
+ Ordering::Relaxed,
+ );
+ } else {
+ self.target_range = outer_len..self.middle_range.start;
+ self.target_ptr.store(
+ PoolRef::make_mut(&pool.value_pool, &mut tree.inner_f),
+ Ordering::Relaxed,
+ );
+ }
+ } else if index >= self.middle_range.end {
+ let outer_start = self.middle_range.end + tree.inner_b.len();
+ if index < outer_start {
+ self.target_range = self.middle_range.end..outer_start;
+ self.target_ptr.store(
+ PoolRef::make_mut(&pool.value_pool, &mut tree.inner_b),
+ Ordering::Relaxed,
+ );
+ } else {
+ self.target_range = outer_start..tree.length;
+ self.target_ptr.store(
+ PoolRef::make_mut(&pool.value_pool, &mut tree.outer_b),
+ Ordering::Relaxed,
+ );
+ }
+ } else {
+ let tree_index = index - self.middle_range.start;
+ let level = tree.middle_level;
+ let middle = Ref::make_mut(&mut tree.middle);
+ let (range, ptr) = middle.lookup_chunk_mut(pool, level, 0, tree_index);
+ self.target_range =
+ (range.start + self.middle_range.start)..(range.end + self.middle_range.start);
+ self.target_ptr.store(ptr, Ordering::Relaxed);
+ }
+ }
+
+ #[allow(unsafe_code)]
+ fn get_focus(&mut self) -> &mut Chunk<A> {
+ unsafe { &mut *self.target_ptr.load(Ordering::Relaxed) }
+ }
+
+ pub fn get(&mut self, pool: &RRBPool<A>, index: usize) -> Option<&mut A> {
+ if index >= self.len() {
+ return None;
+ }
+ let phys_index = self.physical_index(index);
+ if !contains(&self.target_range, &phys_index) {
+ self.set_focus(pool, phys_index);
+ }
+ let target_phys_index = phys_index - self.target_range.start;
+ Some(&mut self.get_focus()[target_phys_index])
+ }
+
+ pub fn get_chunk(&mut self, pool: &RRBPool<A>, index: usize) -> (Range<usize>, &mut [A]) {
+ let phys_index = self.physical_index(index);
+ if !contains(&self.target_range, &phys_index) {
+ self.set_focus(pool, phys_index);
+ }
+ let mut left = 0;
+ let mut right = 0;
+ if self.target_range.start < self.view.start {
+ left = self.view.start - self.target_range.start;
+ }
+ if self.target_range.end > self.view.end {
+ right = self.target_range.end - self.view.end;
+ }
+ let phys_range = (self.target_range.start + left)..(self.target_range.end - right);
+ let log_range = self.logical_range(&phys_range);
+ let slice_len = self.get_focus().len();
+ let slice = &mut (self.get_focus().as_mut_slice())[left..(slice_len - right)];
+ (log_range, slice)
+ }
+}
diff --git a/vendor/im-rc/src/vector/mod.rs b/vendor/im-rc/src/vector/mod.rs
new file mode 100644
index 000000000..a2ce0adf7
--- /dev/null
+++ b/vendor/im-rc/src/vector/mod.rs
@@ -0,0 +1,2745 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! A persistent vector.
+//!
+//! This is a sequence of elements in insertion order - if you need a
+//! list of things, any kind of list of things, this is what you're
+//! looking for.
+//!
+//! It's implemented as an [RRB vector][rrbpaper] with [smart
+//! head/tail chunking][chunkedseq]. In performance terms, this means
+//! that practically every operation is O(log n), except push/pop on
+//! both sides, which will be O(1) amortised, and O(log n) in the
+//! worst case. In practice, the push/pop operations will be
+//! blindingly fast, nearly on par with the native
+//! [`VecDeque`][VecDeque], and other operations will have decent, if
+//! not high, performance, but they all have more or less the same
+//! O(log n) complexity, so you don't need to keep their performance
+//! characteristics in mind - everything, even splitting and merging,
+//! is safe to use and never too slow.
+//!
+//! ## Performance Notes
+//!
+//! Because of the head/tail chunking technique, until you push a
+//! number of items above double the tree's branching factor (that's
+//! `self.len()` = 2 × *k* (where *k* = 64) = 128) on either side, the
+//! data structure is still just a handful of arrays, not yet an RRB
+//! tree, so you'll see performance and memory characteristics fairly
+//! close to [`Vec`][Vec] or [`VecDeque`][VecDeque].
+//!
+//! This means that, once it has grown into a full RRB tree, the structure
+//! preallocates four chunks of size *k* (*k* being the tree's branching
+//! factor), equivalent to a [`Vec`][Vec] with an initial capacity of 256.
+//! Beyond that, it will allocate tree nodes of capacity *k* as needed.
+//!
+//! In addition, vectors start out as single chunks, and only expand into the
+//! full data structure once you go past the chunk size. This makes them
+//! perform identically to [`Vec`][Vec] at small sizes.
+//!
+//! [rrbpaper]: https://infoscience.epfl.ch/record/213452/files/rrbvector.pdf
+//! [chunkedseq]: http://deepsea.inria.fr/pasl/chunkedseq.pdf
+//! [Vec]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+//! [VecDeque]: https://doc.rust-lang.org/std/collections/struct.VecDeque.html
+
+use std::borrow::Borrow;
+use std::cmp::Ordering;
+use std::fmt::{Debug, Error, Formatter};
+use std::hash::{Hash, Hasher};
+use std::iter::Sum;
+use std::iter::{FromIterator, FusedIterator};
+use std::mem::{replace, swap};
+use std::ops::{Add, Index, IndexMut, RangeBounds};
+
+use sized_chunks::InlineArray;
+
+use crate::nodes::chunk::{Chunk, CHUNK_SIZE};
+use crate::nodes::rrb::{Node, PopResult, PushResult, SplitResult};
+use crate::sort;
+use crate::util::{clone_ref, swap_indices, to_range, Pool, PoolDefault, PoolRef, Ref, Side};
+
+use self::VectorInner::{Full, Inline, Single};
+
+mod focus;
+
+pub use self::focus::{Focus, FocusMut};
+
+mod pool;
+pub use self::pool::RRBPool;
+
+#[cfg(all(threadsafe, any(test, feature = "rayon")))]
+pub mod rayon;
+
+/// Construct a vector from a sequence of elements.
+///
+/// # Examples
+///
+/// ```
+/// # #[macro_use] extern crate im_rc as im;
+/// # use im::vector::Vector;
+/// # fn main() {
+/// assert_eq!(
+/// vector![1, 2, 3],
+/// Vector::from(vec![1, 2, 3])
+/// );
+/// # }
+/// ```
+#[macro_export]
+macro_rules! vector {
+ () => { $crate::vector::Vector::new() };
+
+ ( $($x:expr),* ) => {{
+ let mut l = $crate::vector::Vector::new();
+ $(
+ l.push_back($x);
+ )*
+ l
+ }};
+
+ ( $($x:expr ,)* ) => {{
+ let mut l = $crate::vector::Vector::new();
+ $(
+ l.push_back($x);
+ )*
+ l
+ }};
+}
+
+/// A persistent vector.
+///
+/// This is a sequence of elements in insertion order - if you need a list of
+/// things, any kind of list of things, this is what you're looking for.
+///
+/// It's implemented as an [RRB vector][rrbpaper] with [smart head/tail
+/// chunking][chunkedseq]. In performance terms, this means that practically
+/// every operation is O(log n), except push/pop on both sides, which will be
+/// O(1) amortised, and O(log n) in the worst case. In practice, the push/pop
+/// operations will be blindingly fast, nearly on par with the native
+/// [`VecDeque`][VecDeque], and other operations will have decent, if not high,
+/// performance, but they all have more or less the same O(log n) complexity, so
+/// you don't need to keep their performance characteristics in mind -
+/// everything, even splitting and merging, is safe to use and never too slow.
+///
+/// ## Performance Notes
+///
+/// Because of the head/tail chunking technique, until you push a number of
+/// items above double the tree's branching factor (that's `self.len()` = 2 ×
+/// *k* (where *k* = 64) = 128) on either side, the data structure is still just
+/// a handful of arrays, not yet an RRB tree, so you'll see performance and
+/// memory characteristics similar to [`Vec`][Vec] or [`VecDeque`][VecDeque].
+///
+/// This means that, once it has grown into a full RRB tree, the structure
+/// preallocates four chunks of size *k* (*k* being the tree's branching
+/// factor), equivalent to a [`Vec`][Vec] with an initial capacity of 256.
+/// Beyond that, it will allocate tree nodes of capacity *k* as needed.
+///
+/// In addition, vectors start out as single chunks, and only expand into the
+/// full data structure once you go past the chunk size. This makes them
+/// perform identically to [`Vec`][Vec] at small sizes.
+///
+/// [rrbpaper]: https://infoscience.epfl.ch/record/213452/files/rrbvector.pdf
+/// [chunkedseq]: http://deepsea.inria.fr/pasl/chunkedseq.pdf
+/// [Vec]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+/// [VecDeque]: https://doc.rust-lang.org/std/collections/struct.VecDeque.html
+pub struct Vector<A> {
+ vector: VectorInner<A>,
+}
+
+enum VectorInner<A> {
+ Inline(RRBPool<A>, InlineArray<A, Rrb<A>>),
+ Single(RRBPool<A>, PoolRef<Chunk<A>>),
+ Full(RRBPool<A>, Rrb<A>),
+}
+
+#[doc(hidden)]
+pub struct Rrb<A> {
+ length: usize,
+ middle_level: usize,
+ outer_f: PoolRef<Chunk<A>>,
+ inner_f: PoolRef<Chunk<A>>,
+ middle: Ref<Node<A>>,
+ inner_b: PoolRef<Chunk<A>>,
+ outer_b: PoolRef<Chunk<A>>,
+}
+
+impl<A> Clone for Rrb<A> {
+ fn clone(&self) -> Self {
+ Rrb {
+ length: self.length,
+ middle_level: self.middle_level,
+ outer_f: self.outer_f.clone(),
+ inner_f: self.inner_f.clone(),
+ middle: self.middle.clone(),
+ inner_b: self.inner_b.clone(),
+ outer_b: self.outer_b.clone(),
+ }
+ }
+}
+
+impl<A: Clone> Vector<A> {
+ /// Get a reference to the memory pool this `Vector` is using.
+ ///
+ /// Note that if you didn't specifically construct it with a pool, you'll
+ /// get back a reference to a pool of size 0.
+ #[cfg_attr(not(feature = "pool"), doc(hidden))]
+ pub fn pool(&self) -> &RRBPool<A> {
+ match self.vector {
+ Inline(ref pool, _) => pool,
+ Single(ref pool, _) => pool,
+ Full(ref pool, _) => pool,
+ }
+ }
+
+ /// True if a vector's inline or single chunk is full, ie. it must be
+ /// promoted to grow further.
+ fn needs_promotion(&self) -> bool {
+ match &self.vector {
+ Inline(_, chunk) if chunk.is_full() => true,
+ Single(_, chunk) if chunk.is_full() => true,
+ _ => false,
+ }
+ }
+
+ /// Promote an inline to a single.
+ fn promote_inline(&mut self) {
+ if let Inline(pool, chunk) = &mut self.vector {
+ self.vector = Single(pool.clone(), PoolRef::new(&pool.value_pool, chunk.into()));
+ }
+ }
+
+ /// Promote a single to a full, with the single chunk becoming inner_f, or
+ /// promote an inline to a single.
+ fn promote_front(&mut self) {
+ self.vector = match &mut self.vector {
+ Inline(pool, chunk) => {
+ Single(pool.clone(), PoolRef::new(&pool.value_pool, chunk.into()))
+ }
+ Single(pool, chunk) => {
+ let chunk = chunk.clone();
+ Full(
+ pool.clone(),
+ Rrb {
+ length: chunk.len(),
+ middle_level: 0,
+ outer_f: PoolRef::default(&pool.value_pool),
+ inner_f: chunk,
+ middle: Ref::new(Node::new()),
+ inner_b: PoolRef::default(&pool.value_pool),
+ outer_b: PoolRef::default(&pool.value_pool),
+ },
+ )
+ }
+ Full(_, _) => return,
+ }
+ }
+
+ /// Promote a single to a full, with the single chunk becoming inner_b, or
+ /// promote an inline to a single.
+ fn promote_back(&mut self) {
+ self.vector = match &mut self.vector {
+ Inline(pool, chunk) => {
+ Single(pool.clone(), PoolRef::new(&pool.value_pool, chunk.into()))
+ }
+ Single(pool, chunk) => {
+ let chunk = chunk.clone();
+ Full(
+ pool.clone(),
+ Rrb {
+ length: chunk.len(),
+ middle_level: 0,
+ outer_f: PoolRef::default(&pool.value_pool),
+ inner_f: PoolRef::default(&pool.value_pool),
+ middle: Ref::new(Node::new()),
+ inner_b: chunk,
+ outer_b: PoolRef::default(&pool.value_pool),
+ },
+ )
+ }
+ Full(_, _) => return,
+ }
+ }
+
+ /// Construct an empty vector.
+ #[must_use]
+ pub fn new() -> Self {
+ Self {
+ vector: Inline(RRBPool::default(), InlineArray::new()),
+ }
+ }
+
+ /// Construct an empty vector using a specific memory pool.
+ #[cfg(feature = "pool")]
+ #[must_use]
+ pub fn with_pool(pool: &RRBPool<A>) -> Self {
+ Self {
+ vector: Inline(pool.clone(), InlineArray::new()),
+ }
+ }
+
+ /// Get the length of a vector.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// assert_eq!(5, vector![1, 2, 3, 4, 5].len());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn len(&self) -> usize {
+ match &self.vector {
+ Inline(_, chunk) => chunk.len(),
+ Single(_, chunk) => chunk.len(),
+ Full(_, tree) => tree.length,
+ }
+ }
+
+ /// Test whether a vector is empty.
+ ///
+ /// Time: O(1)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector!["Joe", "Mike", "Robert"];
+ /// assert_eq!(false, vec.is_empty());
+ /// assert_eq!(true, Vector::<i32>::new().is_empty());
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Test whether a vector is currently inlined.
+ ///
+ /// Vectors small enough that their contents could be stored entirely inside
+ /// the space of `std::mem::size_of::<Vector<A>>()` bytes are stored inline on
+ /// the stack instead of allocating any chunks. This method returns `true` if
+ /// this vector is currently inlined, or `false` if it currently has chunks allocated
+ /// on the heap.
+ ///
+ /// This may be useful in conjunction with [`ptr_eq()`][ptr_eq], which checks if
+ /// two vectors' heap allocations are the same, and thus will never return `true`
+ /// for inlined vectors.
+ ///
+ /// Time: O(1)
+ ///
+ /// [ptr_eq]: #method.ptr_eq
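+ ///
+ /// # Examples
+ ///
+ /// A rough sketch of how a vector moves off the stack as it grows (the
+ /// exact inline capacity depends on the element type):
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = Vector::new();
+ /// assert!(vec.is_inline());
+ /// for i in 0..1000 {
+ /// vec.push_back(i);
+ /// }
+ /// assert!(!vec.is_inline());
+ /// ```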
+ #[inline]
+ #[must_use]
+ pub fn is_inline(&self) -> bool {
+ matches!(&self.vector, Inline(_, _))
+ }
+
+ /// Test whether two vectors refer to the same content in memory.
+ ///
+ /// This uses the following rules to determine equality:
+ /// * If the two sides are references to the same vector, return true.
+ /// * If the two sides are single chunk vectors pointing to the same chunk, return true.
+ /// * If the two sides are full trees pointing to the same chunks, return true.
+ ///
+ /// This would return true if you're comparing a vector to itself, or
+ /// if you're comparing a vector to a fresh clone of itself. The exception to this is
+ /// if you've cloned an inline array (ie. an array with so few elements they can fit
+ /// inside the space a `Vector` allocates for its pointers, so there are no heap allocations
+ /// to compare).
+ ///
+ /// Time: O(1)
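+ ///
+ /// # Examples
+ ///
+ /// A sketch of the intended behaviour: a clone shares its heap
+ /// allocations, while an independently built vector with equal contents
+ /// does not.
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// # use std::iter::FromIterator;
+ /// let vec = Vector::from_iter(0..256);
+ /// let copy = vec.clone();
+ /// assert!(vec.ptr_eq(&copy));
+ /// let rebuilt = Vector::from_iter(0..256);
+ /// assert!(!vec.ptr_eq(&rebuilt));
+ /// ```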
+ #[must_use]
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ fn cmp_chunk<A>(left: &PoolRef<Chunk<A>>, right: &PoolRef<Chunk<A>>) -> bool {
+ (left.is_empty() && right.is_empty()) || PoolRef::ptr_eq(left, right)
+ }
+
+ if std::ptr::eq(self, other) {
+ return true;
+ }
+
+ match (&self.vector, &other.vector) {
+ (Single(_, left), Single(_, right)) => cmp_chunk(left, right),
+ (Full(_, left), Full(_, right)) => {
+ cmp_chunk(&left.outer_f, &right.outer_f)
+ && cmp_chunk(&left.inner_f, &right.inner_f)
+ && cmp_chunk(&left.inner_b, &right.inner_b)
+ && cmp_chunk(&left.outer_b, &right.outer_b)
+ && ((left.middle.is_empty() && right.middle.is_empty())
+ || Ref::ptr_eq(&left.middle, &right.middle))
+ }
+ _ => false,
+ }
+ }
+
+ /// Get an iterator over a vector.
+ ///
+ /// Time: O(1)
+ #[inline]
+ #[must_use]
+ pub fn iter(&self) -> Iter<'_, A> {
+ Iter::new(self)
+ }
+
+ /// Get a mutable iterator over a vector.
+ ///
+ /// Time: O(1)
+ #[inline]
+ #[must_use]
+ pub fn iter_mut(&mut self) -> IterMut<'_, A> {
+ IterMut::new(self)
+ }
+
+ /// Get an iterator over the leaf nodes of a vector.
+ ///
+ /// This returns an iterator over the [`Chunk`s][Chunk] at the leaves of the
+ /// RRB tree. These are useful for efficient parallelisation of work on
+ /// the vector, but should not be used for basic iteration.
+ ///
+ /// Time: O(1)
+ ///
+ /// [Chunk]: ../chunk/struct.Chunk.html
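+ ///
+ /// # Examples
+ ///
+ /// A small sketch: the leaves partition the vector, so their lengths
+ /// should sum to the vector's length.
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// # use std::iter::FromIterator;
+ /// let vec = Vector::from_iter(0..1000);
+ /// let total: usize = vec.leaves().map(|leaf| leaf.len()).sum();
+ /// assert_eq!(1000, total);
+ /// ```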
+ #[inline]
+ #[must_use]
+ pub fn leaves(&self) -> Chunks<'_, A> {
+ Chunks::new(self)
+ }
+
+ /// Get a mutable iterator over the leaf nodes of a vector.
+ ///
+ /// This returns an iterator over the [`Chunk`s][Chunk] at the leaves of the
+ /// RRB tree. These are useful for efficient parallelisation of work on
+ /// the vector, but should not be used for basic iteration.
+ ///
+ /// Time: O(1)
+ ///
+ /// [Chunk]: ../chunk/struct.Chunk.html
+ #[inline]
+ #[must_use]
+ pub fn leaves_mut(&mut self) -> ChunksMut<'_, A> {
+ ChunksMut::new(self)
+ }
+
+ /// Construct a [`Focus`][Focus] for a vector.
+ ///
+ /// Time: O(1)
+ ///
+ /// [Focus]: enum.Focus.html
+ #[inline]
+ #[must_use]
+ pub fn focus(&self) -> Focus<'_, A> {
+ Focus::new(self)
+ }
+
+ /// Construct a [`FocusMut`][FocusMut] for a vector.
+ ///
+ /// Time: O(1)
+ ///
+ /// [FocusMut]: enum.FocusMut.html
+ #[inline]
+ #[must_use]
+ pub fn focus_mut(&mut self) -> FocusMut<'_, A> {
+ FocusMut::new(self)
+ }
+
+ /// Get a reference to the value at index `index` in a vector.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector!["Joe", "Mike", "Robert"];
+ /// assert_eq!(Some(&"Robert"), vec.get(2));
+ /// assert_eq!(None, vec.get(5));
+ /// ```
+ #[must_use]
+ pub fn get(&self, index: usize) -> Option<&A> {
+ if index >= self.len() {
+ return None;
+ }
+
+ match &self.vector {
+ Inline(_, chunk) => chunk.get(index),
+ Single(_, chunk) => chunk.get(index),
+ Full(_, tree) => {
+ let mut local_index = index;
+
+ if local_index < tree.outer_f.len() {
+ return Some(&tree.outer_f[local_index]);
+ }
+ local_index -= tree.outer_f.len();
+
+ if local_index < tree.inner_f.len() {
+ return Some(&tree.inner_f[local_index]);
+ }
+ local_index -= tree.inner_f.len();
+
+ if local_index < tree.middle.len() {
+ return Some(tree.middle.index(tree.middle_level, local_index));
+ }
+ local_index -= tree.middle.len();
+
+ if local_index < tree.inner_b.len() {
+ return Some(&tree.inner_b[local_index]);
+ }
+ local_index -= tree.inner_b.len();
+
+ Some(&tree.outer_b[local_index])
+ }
+ }
+ }
+
+ /// Get a mutable reference to the value at index `index` in a
+ /// vector.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector!["Joe", "Mike", "Robert"];
+ /// {
+ /// let robert = vec.get_mut(2).unwrap();
+ /// assert_eq!(&mut "Robert", robert);
+ /// *robert = "Bjarne";
+ /// }
+ /// assert_eq!(vector!["Joe", "Mike", "Bjarne"], vec);
+ /// ```
+ #[must_use]
+ pub fn get_mut(&mut self, index: usize) -> Option<&mut A> {
+ if index >= self.len() {
+ return None;
+ }
+
+ match &mut self.vector {
+ Inline(_, chunk) => chunk.get_mut(index),
+ Single(pool, chunk) => PoolRef::make_mut(&pool.value_pool, chunk).get_mut(index),
+ Full(pool, tree) => {
+ let mut local_index = index;
+
+ if local_index < tree.outer_f.len() {
+ let outer_f = PoolRef::make_mut(&pool.value_pool, &mut tree.outer_f);
+ return Some(&mut outer_f[local_index]);
+ }
+ local_index -= tree.outer_f.len();
+
+ if local_index < tree.inner_f.len() {
+ let inner_f = PoolRef::make_mut(&pool.value_pool, &mut tree.inner_f);
+ return Some(&mut inner_f[local_index]);
+ }
+ local_index -= tree.inner_f.len();
+
+ if local_index < tree.middle.len() {
+ let middle = Ref::make_mut(&mut tree.middle);
+ return Some(middle.index_mut(pool, tree.middle_level, local_index));
+ }
+ local_index -= tree.middle.len();
+
+ if local_index < tree.inner_b.len() {
+ let inner_b = PoolRef::make_mut(&pool.value_pool, &mut tree.inner_b);
+ return Some(&mut inner_b[local_index]);
+ }
+ local_index -= tree.inner_b.len();
+
+ let outer_b = PoolRef::make_mut(&pool.value_pool, &mut tree.outer_b);
+ Some(&mut outer_b[local_index])
+ }
+ }
+ }
+
+ /// Get the first element of a vector.
+ ///
+ /// If the vector is empty, `None` is returned.
+ ///
+ /// Time: O(log n)
+ #[inline]
+ #[must_use]
+ pub fn front(&self) -> Option<&A> {
+ self.get(0)
+ }
+
+ /// Get a mutable reference to the first element of a vector.
+ ///
+ /// If the vector is empty, `None` is returned.
+ ///
+ /// Time: O(log n)
+ #[inline]
+ #[must_use]
+ pub fn front_mut(&mut self) -> Option<&mut A> {
+ self.get_mut(0)
+ }
+
+ /// Get the first element of a vector.
+ ///
+ /// If the vector is empty, `None` is returned.
+ ///
+ /// This is an alias for the [`front`][front] method.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [front]: #method.front
+ #[inline]
+ #[must_use]
+ pub fn head(&self) -> Option<&A> {
+ self.get(0)
+ }
+
+ /// Get the last element of a vector.
+ ///
+ /// If the vector is empty, `None` is returned.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn back(&self) -> Option<&A> {
+ if self.is_empty() {
+ None
+ } else {
+ self.get(self.len() - 1)
+ }
+ }
+
+ /// Get a mutable reference to the last element of a vector.
+ ///
+ /// If the vector is empty, `None` is returned.
+ ///
+ /// Time: O(log n)
+ #[must_use]
+ pub fn back_mut(&mut self) -> Option<&mut A> {
+ if self.is_empty() {
+ None
+ } else {
+ let len = self.len();
+ self.get_mut(len - 1)
+ }
+ }
+
+ /// Get the last element of a vector.
+ ///
+ /// If the vector is empty, `None` is returned.
+ ///
+ /// This is an alias for the [`back`][back] method.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [back]: #method.back
+ #[inline]
+ #[must_use]
+ pub fn last(&self) -> Option<&A> {
+ self.back()
+ }
+
+ /// Get the index of a given element in the vector.
+ ///
+ /// Searches the vector for the first occurrence of a given value,
+ /// and returns the index of the value if it's there. Otherwise,
+ /// it returns `None`.
+ ///
+ /// Time: O(n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 2, 3, 4, 5];
+ /// assert_eq!(Some(2), vec.index_of(&3));
+ /// assert_eq!(None, vec.index_of(&31337));
+ /// ```
+ #[must_use]
+ pub fn index_of(&self, value: &A) -> Option<usize>
+ where
+ A: PartialEq,
+ {
+ for (index, item) in self.iter().enumerate() {
+ if value == item {
+ return Some(index);
+ }
+ }
+ None
+ }
+
+ /// Test if a given element is in the vector.
+ ///
+ /// Searches the vector for the first occurrence of a given value,
+ /// and returns `true` if it's there. If it's nowhere to be found
+ /// in the vector, it returns `false`.
+ ///
+ /// Time: O(n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 2, 3, 4, 5];
+ /// assert_eq!(true, vec.contains(&3));
+ /// assert_eq!(false, vec.contains(&31337));
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn contains(&self, value: &A) -> bool
+ where
+ A: PartialEq,
+ {
+ self.index_of(value).is_some()
+ }
+
+ /// Discard all elements from the vector.
+ ///
+ /// This leaves you with an empty vector, and all elements that
+ /// were previously inside it are dropped.
+ ///
+ /// Time: O(n)
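+ ///
+ /// # Examples
+ ///
+ /// For example, clearing a small vector:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// vec.clear();
+ /// assert!(vec.is_empty());
+ /// ```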
+ pub fn clear(&mut self) {
+ if !self.is_empty() {
+ self.vector = Inline(self.pool().clone(), InlineArray::new());
+ }
+ }
+
+ /// Binary search a sorted vector for a given element using a comparator
+ /// function.
+ ///
+ /// Assumes the vector has already been sorted using the same comparator
+ /// function, eg. by using [`sort_by`][sort_by].
+ ///
+ /// If the value is found, it returns `Ok(index)` where `index` is the index
+ /// of the element. If the value isn't found, it returns `Err(index)` where
+ /// `index` is the index at which the element would need to be inserted to
+ /// maintain sorted order.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [sort_by]: #method.sort_by
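+ ///
+ /// # Examples
+ ///
+ /// A small sketch of the `Ok`/`Err` contract described above:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 3, 5, 7];
+ /// assert_eq!(Ok(2), vec.binary_search_by(|x| x.cmp(&5)));
+ /// assert_eq!(Err(1), vec.binary_search_by(|x| x.cmp(&2)));
+ /// ```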
+ pub fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize>
+ where
+ F: FnMut(&A) -> Ordering,
+ {
+ let mut size = self.len();
+ if size == 0 {
+ return Err(0);
+ }
+ let mut base = 0;
+ while size > 1 {
+ let half = size / 2;
+ let mid = base + half;
+ base = match f(&self[mid]) {
+ Ordering::Greater => base,
+ _ => mid,
+ };
+ size -= half;
+ }
+ match f(&self[base]) {
+ Ordering::Equal => Ok(base),
+ Ordering::Greater => Err(base),
+ Ordering::Less => Err(base + 1),
+ }
+ }
+
+ /// Binary search a sorted vector for a given element.
+ ///
+ /// If the value is found, it returns `Ok(index)` where `index` is the index
+ /// of the element. If the value isn't found, it returns `Err(index)` where
+ /// `index` is the index at which the element would need to be inserted to
+ /// maintain sorted order.
+ ///
+ /// Time: O(log n)
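+ ///
+ /// # Examples
+ ///
+ /// For example, searching a sorted vector for present and absent values:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 3, 5, 7, 9];
+ /// assert_eq!(Ok(3), vec.binary_search(&7));
+ /// assert_eq!(Err(2), vec.binary_search(&4));
+ /// ```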
+ pub fn binary_search(&self, value: &A) -> Result<usize, usize>
+ where
+ A: Ord,
+ {
+ self.binary_search_by(|e| e.cmp(value))
+ }
+
+ /// Binary search a sorted vector for a given element with a key extract
+ /// function.
+ ///
+ /// Assumes the vector has already been sorted using the same key extract
+ /// function, eg. by using [`sort_by_key`][sort_by_key].
+ ///
+ /// If the value is found, it returns `Ok(index)` where `index` is the index
+ /// of the element. If the value isn't found, it returns `Err(index)` where
+ /// `index` is the index at which the element would need to be inserted to
+ /// maintain sorted order.
+ ///
+ /// Time: O(log n)
+ ///
+ /// [sort_by_key]: #method.sort_by_key
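+ ///
+ /// # Examples
+ ///
+ /// A small sketch using the first tuple element as the sort key:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![(1, "one"), (2, "two"), (4, "four")];
+ /// assert_eq!(Ok(1), vec.binary_search_by_key(&2, |&(k, _)| k));
+ /// assert_eq!(Err(2), vec.binary_search_by_key(&3, |&(k, _)| k));
+ /// ```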
+ pub fn binary_search_by_key<B, F>(&self, b: &B, mut f: F) -> Result<usize, usize>
+ where
+ F: FnMut(&A) -> B,
+ B: Ord,
+ {
+ self.binary_search_by(|k| f(k).cmp(b))
+ }
+}
+
+impl<A: Clone> Vector<A> {
+ /// Construct a vector with a single value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let vec = Vector::unit(1337);
+ /// assert_eq!(1, vec.len());
+ /// assert_eq!(
+ /// vec.get(0),
+ /// Some(&1337)
+ /// );
+ /// ```
+ #[inline]
+ #[must_use]
+ pub fn unit(a: A) -> Self {
+ let pool = RRBPool::default();
+ if InlineArray::<A, Rrb<A>>::CAPACITY > 0 {
+ let mut array = InlineArray::new();
+ array.push(a);
+ Self {
+ vector: Inline(pool, array),
+ }
+ } else {
+ let chunk = PoolRef::new(&pool.value_pool, Chunk::unit(a));
+ Self {
+ vector: Single(pool, chunk),
+ }
+ }
+ }
+
+ /// Create a new vector with the value at index `index` updated.
+ ///
+ /// Panics if the index is out of bounds.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 2, 3];
+ /// assert_eq!(vector![1, 5, 3], vec.update(1, 5));
+ /// ```
+ #[must_use]
+ pub fn update(&self, index: usize, value: A) -> Self {
+ let mut out = self.clone();
+ out[index] = value;
+ out
+ }
+
+ /// Update the value at index `index` in a vector.
+ ///
+ /// Returns the previous value at the index.
+ ///
+ /// Panics if the index is out of bounds.
+ ///
+ /// Time: O(log n)
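+ ///
+ /// # Examples
+ ///
+ /// A small sketch showing the previous value being returned:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// assert_eq!(2, vec.set(1, 5));
+ /// assert_eq!(vector![1, 5, 3], vec);
+ /// ```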
+ #[inline]
+ pub fn set(&mut self, index: usize, value: A) -> A {
+ replace(&mut self[index], value)
+ }
+
+ /// Swap the elements at indices `i` and `j`.
+ ///
+ /// Time: O(log n)
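+ ///
+ /// # Examples
+ ///
+ /// For example, swapping the first and last elements:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// vec.swap(0, 2);
+ /// assert_eq!(vector![3, 2, 1], vec);
+ /// ```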
+ pub fn swap(&mut self, i: usize, j: usize) {
+ swap_indices(self, i, j)
+ }
+
+ /// Push a value to the front of a vector.
+ ///
+ /// Time: O(1)*
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![5, 6, 7];
+ /// vec.push_front(4);
+ /// assert_eq!(vector![4, 5, 6, 7], vec);
+ /// ```
+ pub fn push_front(&mut self, value: A) {
+ if self.needs_promotion() {
+ self.promote_back();
+ }
+ match &mut self.vector {
+ Inline(_, chunk) => {
+ chunk.insert(0, value);
+ }
+ Single(pool, chunk) => PoolRef::make_mut(&pool.value_pool, chunk).push_front(value),
+ Full(pool, tree) => tree.push_front(pool, value),
+ }
+ }
+
+ /// Push a value to the back of a vector.
+ ///
+ /// Time: O(1)*
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// vec.push_back(4);
+ /// assert_eq!(vector![1, 2, 3, 4], vec);
+ /// ```
+ pub fn push_back(&mut self, value: A) {
+ if self.needs_promotion() {
+ self.promote_front();
+ }
+ match &mut self.vector {
+ Inline(_, chunk) => {
+ chunk.push(value);
+ }
+ Single(pool, chunk) => PoolRef::make_mut(&pool.value_pool, chunk).push_back(value),
+ Full(pool, tree) => tree.push_back(pool, value),
+ }
+ }
+
+ /// Remove the first element from a vector and return it.
+ ///
+ /// Time: O(1)*
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// assert_eq!(Some(1), vec.pop_front());
+ /// assert_eq!(vector![2, 3], vec);
+ /// ```
+ pub fn pop_front(&mut self) -> Option<A> {
+ if self.is_empty() {
+ None
+ } else {
+ match &mut self.vector {
+ Inline(_, chunk) => chunk.remove(0),
+ Single(pool, chunk) => Some(PoolRef::make_mut(&pool.value_pool, chunk).pop_front()),
+ Full(pool, tree) => tree.pop_front(pool),
+ }
+ }
+ }
+
+ /// Remove the last element from a vector and return it.
+ ///
+ /// Time: O(1)*
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// assert_eq!(Some(3), vec.pop_back());
+ /// assert_eq!(vector![1, 2], vec);
+ /// ```
+ pub fn pop_back(&mut self) -> Option<A> {
+ if self.is_empty() {
+ None
+ } else {
+ match &mut self.vector {
+ Inline(_, chunk) => chunk.pop(),
+ Single(pool, chunk) => Some(PoolRef::make_mut(&pool.value_pool, chunk).pop_back()),
+ Full(pool, tree) => tree.pop_back(pool),
+ }
+ }
+ }
+
+ /// Append the vector `other` to the end of the current vector.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// vec.append(vector![7, 8, 9]);
+ /// assert_eq!(vector![1, 2, 3, 7, 8, 9], vec);
+ /// ```
+ pub fn append(&mut self, mut other: Self) {
+ if other.is_empty() {
+ return;
+ }
+
+ if self.is_empty() {
+ *self = other;
+ return;
+ }
+
+ self.promote_inline();
+ other.promote_inline();
+
+ let total_length = self
+ .len()
+ .checked_add(other.len())
+ .expect("Vector length overflow");
+
+ match &mut self.vector {
+ Inline(_, _) => unreachable!("inline vecs should have been promoted"),
+ Single(pool, left) => {
+ match &mut other.vector {
+ Inline(_, _) => unreachable!("inline vecs should have been promoted"),
+ // If both are single chunks and left has room for right: directly
+ // memcpy right into left
+ Single(_, ref mut right) if total_length <= CHUNK_SIZE => {
+ PoolRef::make_mut(&pool.value_pool, left)
+ .append(PoolRef::make_mut(&pool.value_pool, right));
+ return;
+ }
+ // If only left is a single chunk and has room for right: push
+ // right's elements into left
+ _ if total_length <= CHUNK_SIZE => {
+ while let Some(value) = other.pop_front() {
+ PoolRef::make_mut(&pool.value_pool, left).push_back(value);
+ }
+ return;
+ }
+ _ => {}
+ }
+ }
+ Full(pool, left) => {
+ if let Full(_, mut right) = other.vector {
+ // If left and right are trees with empty middles, left has no back
+ // buffers, and right has no front buffers: copy right's back
+ // buffers over to left
+ if left.middle.is_empty()
+ && right.middle.is_empty()
+ && left.outer_b.is_empty()
+ && left.inner_b.is_empty()
+ && right.outer_f.is_empty()
+ && right.inner_f.is_empty()
+ {
+ left.inner_b = right.inner_b;
+ left.outer_b = right.outer_b;
+ left.length = total_length;
+ return;
+ }
+ // If left and right are trees with empty middles and left's buffers
+ // can fit right's buffers: push right's elements onto left
+ if left.middle.is_empty()
+ && right.middle.is_empty()
+ && total_length <= CHUNK_SIZE * 4
+ {
+ while let Some(value) = right.pop_front(pool) {
+ left.push_back(pool, value);
+ }
+ return;
+ }
+ // Both are full and big: do the full RRB join
+ let inner_b1 = left.inner_b.clone();
+ left.push_middle(pool, Side::Right, inner_b1);
+ let outer_b1 = left.outer_b.clone();
+ left.push_middle(pool, Side::Right, outer_b1);
+ let inner_f2 = right.inner_f.clone();
+ right.push_middle(pool, Side::Left, inner_f2);
+ let outer_f2 = right.outer_f.clone();
+ right.push_middle(pool, Side::Left, outer_f2);
+
+ let mut middle1 = clone_ref(replace(&mut left.middle, Ref::from(Node::new())));
+ let mut middle2 = clone_ref(right.middle);
+ let normalised_middle = match left.middle_level.cmp(&right.middle_level) {
+ Ordering::Greater => {
+ middle2 = middle2.elevate(pool, left.middle_level - right.middle_level);
+ left.middle_level
+ }
+ Ordering::Less => {
+ middle1 = middle1.elevate(pool, right.middle_level - left.middle_level);
+ right.middle_level
+ }
+ Ordering::Equal => left.middle_level,
+ };
+ left.middle = Ref::new(Node::merge(pool, middle1, middle2, normalised_middle));
+ left.middle_level = normalised_middle + 1;
+
+ left.inner_b = right.inner_b;
+ left.outer_b = right.outer_b;
+ left.length = total_length;
+ left.prune();
+ return;
+ }
+ }
+ }
+ // No optimisations available, and either left, right or both are
+ // single: promote both to full and retry
+ self.promote_front();
+ other.promote_back();
+ self.append(other)
+ }
+
+ /// Retain only the elements specified by the predicate.
+ ///
+ /// Remove all elements for which the provided function `f`
+ /// returns false from the vector.
+ ///
+ /// Time: O(n)
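+ ///
+ /// # Examples
+ ///
+ /// A small sketch keeping only the even numbers:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3, 4, 5, 6];
+ /// vec.retain(|&x| x % 2 == 0);
+ /// assert_eq!(vector![2, 4, 6], vec);
+ /// ```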
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&A) -> bool,
+ {
+ let len = self.len();
+ let mut del = 0;
+ {
+ let mut focus = self.focus_mut();
+ for i in 0..len {
+ if !f(focus.index(i)) {
+ del += 1;
+ } else if del > 0 {
+ focus.swap(i - del, i);
+ }
+ }
+ }
+ if del > 0 {
+ self.split_off(len - del);
+ }
+ }
+
+ /// Split a vector at a given index.
+ ///
+ /// Split a vector at a given index, consuming the vector and
+ /// returning a pair of the left hand side and the right hand side
+ /// of the split.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let vec = vector![1, 2, 3, 7, 8, 9];
+ /// let (left, right) = vec.split_at(3);
+ /// assert_eq!(vector![1, 2, 3], left);
+ /// assert_eq!(vector![7, 8, 9], right);
+ /// ```
+ pub fn split_at(mut self, index: usize) -> (Self, Self) {
+ let right = self.split_off(index);
+ (self, right)
+ }
+
+ /// Split a vector at a given index.
+ ///
+ /// Split a vector at a given index, leaving the left hand side in
+ /// the current vector and returning a new vector containing the
+ /// right hand side.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let mut left = vector![1, 2, 3, 7, 8, 9];
+ /// let right = left.split_off(3);
+ /// assert_eq!(vector![1, 2, 3], left);
+ /// assert_eq!(vector![7, 8, 9], right);
+ /// ```
+ pub fn split_off(&mut self, index: usize) -> Self {
+ assert!(index <= self.len());
+
+ match &mut self.vector {
+ Inline(pool, chunk) => Self {
+ vector: Inline(pool.clone(), chunk.split_off(index)),
+ },
+ Single(pool, chunk) => Self {
+ vector: Single(
+ pool.clone(),
+ PoolRef::new(
+ &pool.value_pool,
+ PoolRef::make_mut(&pool.value_pool, chunk).split_off(index),
+ ),
+ ),
+ },
+ Full(pool, tree) => {
+ let mut local_index = index;
+
+ if local_index < tree.outer_f.len() {
+ let of2 = PoolRef::make_mut(&pool.value_pool, &mut tree.outer_f)
+ .split_off(local_index);
+ let right = Rrb {
+ length: tree.length - index,
+ middle_level: tree.middle_level,
+ outer_f: PoolRef::new(&pool.value_pool, of2),
+ inner_f: replace_pool_def(&pool.value_pool, &mut tree.inner_f),
+ middle: std::mem::take(&mut tree.middle),
+ inner_b: replace_pool_def(&pool.value_pool, &mut tree.inner_b),
+ outer_b: replace_pool_def(&pool.value_pool, &mut tree.outer_b),
+ };
+ tree.length = index;
+ tree.middle_level = 0;
+ return Self {
+ vector: Full(pool.clone(), right),
+ };
+ }
+
+ local_index -= tree.outer_f.len();
+
+ if local_index < tree.inner_f.len() {
+ let if2 = PoolRef::make_mut(&pool.value_pool, &mut tree.inner_f)
+ .split_off(local_index);
+ let right = Rrb {
+ length: tree.length - index,
+ middle_level: tree.middle_level,
+ outer_f: PoolRef::new(&pool.value_pool, if2),
+ inner_f: PoolRef::<Chunk<A>>::default(&pool.value_pool),
+ middle: std::mem::take(&mut tree.middle),
+ inner_b: replace_pool_def(&pool.value_pool, &mut tree.inner_b),
+ outer_b: replace_pool_def(&pool.value_pool, &mut tree.outer_b),
+ };
+ tree.length = index;
+ tree.middle_level = 0;
+ swap(&mut tree.outer_b, &mut tree.inner_f);
+ return Self {
+ vector: Full(pool.clone(), right),
+ };
+ }
+
+ local_index -= tree.inner_f.len();
+
+ if local_index < tree.middle.len() {
+ let mut right_middle = tree.middle.clone();
+ let (c1, c2) = {
+ let m1 = Ref::make_mut(&mut tree.middle);
+ let m2 = Ref::make_mut(&mut right_middle);
+ match m1.split(pool, tree.middle_level, Side::Right, local_index) {
+ SplitResult::Dropped(_) => (),
+ SplitResult::OutOfBounds => unreachable!(),
+ };
+ match m2.split(pool, tree.middle_level, Side::Left, local_index) {
+ SplitResult::Dropped(_) => (),
+ SplitResult::OutOfBounds => unreachable!(),
+ };
+ let c1 = match m1.pop_chunk(pool, tree.middle_level, Side::Right) {
+ PopResult::Empty => PoolRef::default(&pool.value_pool),
+ PopResult::Done(chunk) => chunk,
+ PopResult::Drained(chunk) => {
+ m1.clear_node();
+ chunk
+ }
+ };
+ let c2 = match m2.pop_chunk(pool, tree.middle_level, Side::Left) {
+ PopResult::Empty => PoolRef::default(&pool.value_pool),
+ PopResult::Done(chunk) => chunk,
+ PopResult::Drained(chunk) => {
+ m2.clear_node();
+ chunk
+ }
+ };
+ (c1, c2)
+ };
+ let mut right = Rrb {
+ length: tree.length - index,
+ middle_level: tree.middle_level,
+ outer_f: c2,
+ inner_f: PoolRef::<Chunk<A>>::default(&pool.value_pool),
+ middle: right_middle,
+ inner_b: replace_pool_def(&pool.value_pool, &mut tree.inner_b),
+ outer_b: replace(&mut tree.outer_b, c1),
+ };
+ tree.length = index;
+ tree.prune();
+ right.prune();
+ return Self {
+ vector: Full(pool.clone(), right),
+ };
+ }
+
+ local_index -= tree.middle.len();
+
+ if local_index < tree.inner_b.len() {
+ let ib2 = PoolRef::make_mut(&pool.value_pool, &mut tree.inner_b)
+ .split_off(local_index);
+ let right = Rrb {
+ length: tree.length - index,
+ outer_b: replace_pool_def(&pool.value_pool, &mut tree.outer_b),
+ outer_f: PoolRef::new(&pool.value_pool, ib2),
+ ..Rrb::new(pool)
+ };
+ tree.length = index;
+ swap(&mut tree.outer_b, &mut tree.inner_b);
+ return Self {
+ vector: Full(pool.clone(), right),
+ };
+ }
+
+ local_index -= tree.inner_b.len();
+
+ let ob2 =
+ PoolRef::make_mut(&pool.value_pool, &mut tree.outer_b).split_off(local_index);
+ tree.length = index;
+ Self {
+ vector: Single(pool.clone(), PoolRef::new(&pool.value_pool, ob2)),
+ }
+ }
+ }
+ }
+
+ /// Construct a vector with `count` elements removed from the
+ /// start of the current vector.
+ ///
+ /// Time: O(log n)
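+ ///
+ /// # Examples
+ ///
+ /// For example, dropping the first two elements:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 2, 3, 4, 5];
+ /// assert_eq!(vector![3, 4, 5], vec.skip(2));
+ /// ```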
+ #[must_use]
+ pub fn skip(&self, count: usize) -> Self {
+ // FIXME can be made more efficient by dropping the unwanted side without constructing it
+ self.clone().split_off(count)
+ }
+
+ /// Construct a vector of the first `count` elements from the
+ /// current vector.
+ ///
+ /// Time: O(log n)
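+ ///
+ /// # Examples
+ ///
+ /// For example, keeping only the first three elements:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let vec = vector![1, 2, 3, 4, 5];
+ /// assert_eq!(vector![1, 2, 3], vec.take(3));
+ /// ```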
+ #[must_use]
+ pub fn take(&self, count: usize) -> Self {
+ // FIXME can be made more efficient by dropping the unwanted side without constructing it
+ let mut left = self.clone();
+ left.split_off(count);
+ left
+ }
+
+ /// Truncate a vector to the given size.
+ ///
+ /// Discards all elements in the vector beyond the given length.
+ ///
+ /// Panics if the new length is greater than the current length.
+ ///
+ /// Time: O(log n)
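+ ///
+ /// # Examples
+ ///
+ /// For example, truncating down to three elements:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3, 4, 5];
+ /// vec.truncate(3);
+ /// assert_eq!(vector![1, 2, 3], vec);
+ /// ```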
+ pub fn truncate(&mut self, len: usize) {
+ // FIXME can be made more efficient by dropping the unwanted side without constructing it
+ self.split_off(len);
+ }
+
+ /// Extract a slice from a vector.
+ ///
+ /// Remove the elements from `start_index` until `end_index` in
+ /// the current vector and return the removed slice as a new
+ /// vector.
+ ///
+ /// Time: O(log n)
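+ ///
+ /// # Examples
+ ///
+ /// A small sketch: the range `1..4` is cut out of the vector and
+ /// returned as a new vector.
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3, 4, 5];
+ /// let middle = vec.slice(1..4);
+ /// assert_eq!(vector![2, 3, 4], middle);
+ /// assert_eq!(vector![1, 5], vec);
+ /// ```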
+ pub fn slice<R>(&mut self, range: R) -> Self
+ where
+ R: RangeBounds<usize>,
+ {
+ let r = to_range(&range, self.len());
+ if r.start >= r.end || r.start >= self.len() {
+ return Vector::new();
+ }
+ let mut middle = self.split_off(r.start);
+ let right = middle.split_off(r.end - r.start);
+ self.append(right);
+ middle
+ }
+
+ /// Insert an element into a vector.
+ ///
+ /// Insert an element at position `index`, shifting all elements
+ /// after it to the right.
+ ///
+ /// ## Performance Note
+ ///
+ /// While `push_front` and `push_back` are heavily optimised
+ /// operations, `insert` in the middle of a vector requires a
+ /// split, a push, and an append. Thus, if you want to insert
+ /// many elements at the same location, instead of `insert`ing
+ /// them one by one, you should rather create a new vector
+ /// containing the elements to insert, split the vector at the
+ /// insertion point, and append the left hand, the new vector and
+ /// the right hand in order.
+ ///
+ /// Time: O(log n)
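+ ///
+ /// # Examples
+ ///
+ /// For example, inserting into the middle of a vector:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 4];
+ /// vec.insert(2, 3);
+ /// assert_eq!(vector![1, 2, 3, 4], vec);
+ /// ```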
+ pub fn insert(&mut self, index: usize, value: A) {
+ if index == 0 {
+ return self.push_front(value);
+ }
+ if index == self.len() {
+ return self.push_back(value);
+ }
+ assert!(index < self.len());
+ if matches!(&self.vector, Inline(_, chunk) if chunk.is_full()) {
+ self.promote_inline();
+ }
+ match &mut self.vector {
+ Inline(_, chunk) => {
+ chunk.insert(index, value);
+ }
+ Single(pool, chunk) if chunk.len() < CHUNK_SIZE => {
+ PoolRef::make_mut(&pool.value_pool, chunk).insert(index, value)
+ }
+ // TODO a lot of optimisations still possible here
+ _ => {
+ let right = self.split_off(index);
+ self.push_back(value);
+ self.append(right);
+ }
+ }
+ }
+
+ /// Remove an element from a vector.
+ ///
+ /// Remove the element from position 'index', shifting all
+ /// elements after it to the left, and return the removed element.
+ ///
+ /// ## Performance Note
+ ///
+ /// While `pop_front` and `pop_back` are heavily optimised
+ /// operations, `remove` in the middle of a vector requires a
+ /// split, a pop, and an append. Thus, if you want to remove many
+ /// elements from the same location, instead of `remove`ing them
+ /// one by one, it is much better to use [`slice`][slice].
+ ///
+ /// Time: O(log n)
+ ///
+ /// [slice]: #method.slice
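+ ///
+ /// # Examples
+ ///
+ /// A small sketch showing the removed value being returned:
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::Vector;
+ /// let mut vec = vector![1, 2, 3];
+ /// assert_eq!(2, vec.remove(1));
+ /// assert_eq!(vector![1, 3], vec);
+ /// ```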
+ pub fn remove(&mut self, index: usize) -> A {
+ assert!(index < self.len());
+ match &mut self.vector {
+ Inline(_, chunk) => chunk.remove(index).unwrap(),
+ Single(pool, chunk) => PoolRef::make_mut(&pool.value_pool, chunk).remove(index),
+ _ => {
+ if index == 0 {
+ return self.pop_front().unwrap();
+ }
+ if index == self.len() - 1 {
+ return self.pop_back().unwrap();
+ }
+ // TODO a lot of optimisations still possible here
+ let mut right = self.split_off(index);
+ let value = right.pop_front().unwrap();
+ self.append(right);
+ value
+ }
+ }
+ }
+
+ /// Insert an element into a sorted vector.
+ ///
+ /// Insert an element into a vector in sorted order, assuming the vector is
+ /// already in sorted order.
+ ///
+ /// Time: O(log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let mut vec = vector![1, 2, 3, 7, 8, 9];
+ /// vec.insert_ord(5);
+ /// assert_eq!(vector![1, 2, 3, 5, 7, 8, 9], vec);
+ /// ```
+ pub fn insert_ord(&mut self, item: A)
+ where
+ A: Ord,
+ {
+ match self.binary_search(&item) {
+ Ok(index) | Err(index) => self.insert(index, item),
+ }
+ }
+
+ /// Sort a vector.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let mut vec = vector![3, 2, 5, 4, 1];
+ /// vec.sort();
+ /// assert_eq!(vector![1, 2, 3, 4, 5], vec);
+ /// ```
+ pub fn sort(&mut self)
+ where
+ A: Ord,
+ {
+ self.sort_by(Ord::cmp)
+ }
+
+ /// Sort a vector using a comparator function.
+ ///
+ /// Time: O(n log n)
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #[macro_use] extern crate im_rc as im;
+ /// # use im::vector::Vector;
+ /// let mut vec = vector![3, 2, 5, 4, 1];
+ /// vec.sort_by(|left, right| left.cmp(right));
+ /// assert_eq!(vector![1, 2, 3, 4, 5], vec);
+ /// ```
+ pub fn sort_by<F>(&mut self, cmp: F)
+ where
+ F: Fn(&A, &A) -> Ordering,
+ {
+ let len = self.len();
+ if len > 1 {
+ sort::quicksort(self.focus_mut(), &cmp);
+ }
+ }
+
+ /// Verify the internal consistency of a vector.
+ ///
+ /// This method walks the RRB tree making up the current `Vector`
+ /// (if it has one) and verifies that all the invariants hold.
+ /// If something is wrong, it will panic.
+ ///
+ /// This method requires the `debug` feature flag.
+ #[cfg(any(test, feature = "debug"))]
+ pub fn assert_invariants(&self) {
+ if let Full(_, ref tree) = self.vector {
+ tree.assert_invariants();
+ }
+ }
+}
+
+// Implementation details
+
+impl<A: Clone> Rrb<A> {
+ fn new(pool: &RRBPool<A>) -> Self {
+ Rrb {
+ length: 0,
+ middle_level: 0,
+ outer_f: PoolRef::default(&pool.value_pool),
+ inner_f: PoolRef::default(&pool.value_pool),
+ middle: Ref::new(Node::new()),
+ inner_b: PoolRef::default(&pool.value_pool),
+ outer_b: PoolRef::default(&pool.value_pool),
+ }
+ }
+
+ #[cfg(any(test, feature = "debug"))]
+ fn assert_invariants(&self) {
+ let ml = self.middle.assert_invariants(self.middle_level);
+ assert_eq!(
+ self.length,
+ self.outer_f.len() + self.inner_f.len() + ml + self.inner_b.len() + self.outer_b.len()
+ );
+ }
+
+ fn prune(&mut self) {
+ if self.middle.is_empty() {
+ self.middle = Ref::new(Node::new());
+ self.middle_level = 0;
+ } else {
+ while self.middle_level > 0 && self.middle.is_single() {
+ // FIXME could be optimised, cloning the node is expensive
+ self.middle = Ref::new(self.middle.first_child().clone());
+ self.middle_level -= 1;
+ }
+ }
+ }
+
+ fn pop_front(&mut self, pool: &RRBPool<A>) -> Option<A> {
+ if self.length == 0 {
+ return None;
+ }
+ if self.outer_f.is_empty() {
+ if self.inner_f.is_empty() {
+ if self.middle.is_empty() {
+ if self.inner_b.is_empty() {
+ swap(&mut self.outer_f, &mut self.outer_b);
+ } else {
+ swap(&mut self.outer_f, &mut self.inner_b);
+ }
+ } else {
+ self.outer_f = self.pop_middle(pool, Side::Left).unwrap();
+ }
+ } else {
+ swap(&mut self.outer_f, &mut self.inner_f);
+ }
+ }
+ self.length -= 1;
+ let outer_f = PoolRef::make_mut(&pool.value_pool, &mut self.outer_f);
+ Some(outer_f.pop_front())
+ }
+
+ fn pop_back(&mut self, pool: &RRBPool<A>) -> Option<A> {
+ if self.length == 0 {
+ return None;
+ }
+ if self.outer_b.is_empty() {
+ if self.inner_b.is_empty() {
+ if self.middle.is_empty() {
+ if self.inner_f.is_empty() {
+ swap(&mut self.outer_b, &mut self.outer_f);
+ } else {
+ swap(&mut self.outer_b, &mut self.inner_f);
+ }
+ } else {
+ self.outer_b = self.pop_middle(pool, Side::Right).unwrap();
+ }
+ } else {
+ swap(&mut self.outer_b, &mut self.inner_b);
+ }
+ }
+ self.length -= 1;
+ let outer_b = PoolRef::make_mut(&pool.value_pool, &mut self.outer_b);
+ Some(outer_b.pop_back())
+ }
+
+ fn push_front(&mut self, pool: &RRBPool<A>, value: A) {
+ if self.outer_f.is_full() {
+ swap(&mut self.outer_f, &mut self.inner_f);
+ if !self.outer_f.is_empty() {
+ let mut chunk = PoolRef::new(&pool.value_pool, Chunk::new());
+ swap(&mut chunk, &mut self.outer_f);
+ self.push_middle(pool, Side::Left, chunk);
+ }
+ }
+ self.length = self.length.checked_add(1).expect("Vector length overflow");
+ let outer_f = PoolRef::make_mut(&pool.value_pool, &mut self.outer_f);
+ outer_f.push_front(value)
+ }
+
+ fn push_back(&mut self, pool: &RRBPool<A>, value: A) {
+ if self.outer_b.is_full() {
+ swap(&mut self.outer_b, &mut self.inner_b);
+ if !self.outer_b.is_empty() {
+ let mut chunk = PoolRef::new(&pool.value_pool, Chunk::new());
+ swap(&mut chunk, &mut self.outer_b);
+ self.push_middle(pool, Side::Right, chunk);
+ }
+ }
+ self.length = self.length.checked_add(1).expect("Vector length overflow");
+ let outer_b = PoolRef::make_mut(&pool.value_pool, &mut self.outer_b);
+ outer_b.push_back(value)
+ }
+
+ fn push_middle(&mut self, pool: &RRBPool<A>, side: Side, chunk: PoolRef<Chunk<A>>) {
+ if chunk.is_empty() {
+ return;
+ }
+ let new_middle = {
+ let middle = Ref::make_mut(&mut self.middle);
+ match middle.push_chunk(pool, self.middle_level, side, chunk) {
+ PushResult::Done => return,
+ PushResult::Full(chunk, _num_drained) => Ref::from({
+ match side {
+ Side::Left => Node::from_chunk(pool, self.middle_level, chunk)
+ .join_branches(pool, middle.clone(), self.middle_level),
+ Side::Right => middle.clone().join_branches(
+ pool,
+ Node::from_chunk(pool, self.middle_level, chunk),
+ self.middle_level,
+ ),
+ }
+ }),
+ }
+ };
+ self.middle_level += 1;
+ self.middle = new_middle;
+ }
+
+ fn pop_middle(&mut self, pool: &RRBPool<A>, side: Side) -> Option<PoolRef<Chunk<A>>> {
+ let chunk = {
+ let middle = Ref::make_mut(&mut self.middle);
+ match middle.pop_chunk(pool, self.middle_level, side) {
+ PopResult::Empty => return None,
+ PopResult::Done(chunk) => chunk,
+ PopResult::Drained(chunk) => {
+ middle.clear_node();
+ self.middle_level = 0;
+ chunk
+ }
+ }
+ };
+ Some(chunk)
+ }
+}
+
+#[inline]
+fn replace_pool_def<A: PoolDefault>(pool: &Pool<A>, dest: &mut PoolRef<A>) -> PoolRef<A> {
+ replace(dest, PoolRef::default(pool))
+}
+
+// Core traits
+
+impl<A: Clone> Default for Vector<A> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<A: Clone> Clone for Vector<A> {
+ /// Clone a vector.
+ ///
+ /// Time: O(1), or O(n) with a very small, bounded *n* for an inline vector.
+ fn clone(&self) -> Self {
+ Self {
+ vector: match &self.vector {
+ Inline(pool, chunk) => Inline(pool.clone(), chunk.clone()),
+ Single(pool, chunk) => Single(pool.clone(), chunk.clone()),
+ Full(pool, tree) => Full(pool.clone(), tree.clone()),
+ },
+ }
+ }
+}
+
+impl<A: Clone + Debug> Debug for Vector<A> {
+ fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
+ f.debug_list().entries(self.iter()).finish()
+ // match self {
+ // Full(rrb) => {
+ // writeln!(f, "Head: {:?} {:?}", rrb.outer_f, rrb.inner_f)?;
+ // rrb.middle.print(f, 0, rrb.middle_level)?;
+ // writeln!(f, "Tail: {:?} {:?}", rrb.inner_b, rrb.outer_b)
+ // }
+ // Single(_) => write!(f, "nowt"),
+ // }
+ }
+}
+
+#[cfg(not(has_specialisation))]
+impl<A: Clone + PartialEq> PartialEq for Vector<A> {
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().eq(other.iter())
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<A: Clone + PartialEq> PartialEq for Vector<A> {
+ default fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().eq(other.iter())
+ }
+}
+
+#[cfg(has_specialisation)]
+impl<A: Clone + Eq> PartialEq for Vector<A> {
+ fn eq(&self, other: &Self) -> bool {
+ fn cmp_chunk<A>(left: &PoolRef<Chunk<A>>, right: &PoolRef<Chunk<A>>) -> bool {
+ (left.is_empty() && right.is_empty()) || PoolRef::ptr_eq(left, right)
+ }
+
+ if std::ptr::eq(self, other) {
+ return true;
+ }
+
+ match (&self.vector, &other.vector) {
+ (Single(_, left), Single(_, right)) => {
+ if cmp_chunk(left, right) {
+ return true;
+ }
+ self.iter().eq(other.iter())
+ }
+ (Full(_, left), Full(_, right)) => {
+ if left.length != right.length {
+ return false;
+ }
+
+ if cmp_chunk(&left.outer_f, &right.outer_f)
+ && cmp_chunk(&left.inner_f, &right.inner_f)
+ && cmp_chunk(&left.inner_b, &right.inner_b)
+ && cmp_chunk(&left.outer_b, &right.outer_b)
+ && ((left.middle.is_empty() && right.middle.is_empty())
+ || Ref::ptr_eq(&left.middle, &right.middle))
+ {
+ return true;
+ }
+ self.iter().eq(other.iter())
+ }
+ _ => self.len() == other.len() && self.iter().eq(other.iter()),
+ }
+ }
+}
+
+impl<A: Clone + Eq> Eq for Vector<A> {}
+
+impl<A: Clone + PartialOrd> PartialOrd for Vector<A> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+impl<A: Clone + Ord> Ord for Vector<A> {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+impl<A: Clone + Hash> Hash for Vector<A> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ for i in self {
+ i.hash(state)
+ }
+ }
+}
+
+impl<A: Clone> Sum for Vector<A> {
+ fn sum<I>(it: I) -> Self
+ where
+ I: Iterator<Item = Self>,
+ {
+ it.fold(Self::new(), |a, b| a + b)
+ }
+}
+
+impl<A: Clone> Add for Vector<A> {
+ type Output = Vector<A>;
+
+ /// Concatenate two vectors.
+ ///
+ /// Time: O(log n)
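+ ///
+ /// A minimal usage sketch, assuming the `im_rc::vector!` macro is in scope:
+ ///
+ /// ```
+ /// # use im_rc::vector;
+ /// // `+` consumes both vectors and appends the right one onto the left.
+ /// let joined = vector![1, 2, 3] + vector![4, 5, 6];
+ /// assert_eq!(vector![1, 2, 3, 4, 5, 6], joined);
+ /// ```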
+ fn add(mut self, other: Self) -> Self::Output {
+ self.append(other);
+ self
+ }
+}
+
+impl<'a, A: Clone> Add for &'a Vector<A> {
+ type Output = Vector<A>;
+
+ /// Concatenate two vectors.
+ ///
+ /// Time: O(log n)
+ fn add(self, other: Self) -> Self::Output {
+ let mut out = self.clone();
+ out.append(other.clone());
+ out
+ }
+}
+
+impl<A: Clone> Extend<A> for Vector<A> {
+ /// Add values to the end of a vector by consuming an iterator.
+ ///
+ /// Time: O(n)
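+ ///
+ /// A minimal usage sketch, assuming the `im_rc::vector!` macro is in scope:
+ ///
+ /// ```
+ /// # use im_rc::vector;
+ /// let mut vec = vector![1, 2];
+ /// vec.extend(3..=5);
+ /// assert_eq!(vector![1, 2, 3, 4, 5], vec);
+ /// ```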
+ fn extend<I>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = A>,
+ {
+ for item in iter {
+ self.push_back(item)
+ }
+ }
+}
+
+impl<A: Clone> Index<usize> for Vector<A> {
+ type Output = A;
+ /// Get a reference to the value at index `index` in the vector.
+ ///
+ /// Time: O(log n)
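+ ///
+ /// A minimal usage sketch (indexing past the end panics, unlike `get`),
+ /// assuming the `im_rc::vector!` macro is in scope:
+ ///
+ /// ```
+ /// # use im_rc::vector;
+ /// let vec = vector![10, 20, 30];
+ /// assert_eq!(30, vec[2]);
+ /// ```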
+ fn index(&self, index: usize) -> &Self::Output {
+ match self.get(index) {
+ Some(value) => value,
+ None => panic!(
+ "Vector::index: index out of bounds: {} < {}",
+ index,
+ self.len()
+ ),
+ }
+ }
+}
+
+impl<A: Clone> IndexMut<usize> for Vector<A> {
+ /// Get a mutable reference to the value at index `index` in the
+ /// vector.
+ ///
+ /// Time: O(log n)
+ fn index_mut(&mut self, index: usize) -> &mut Self::Output {
+ match self.get_mut(index) {
+ Some(value) => value,
+ None => panic!("Vector::index_mut: index out of bounds"),
+ }
+ }
+}
+
+// Conversions
+
+impl<'a, A: Clone> IntoIterator for &'a Vector<A> {
+ type Item = &'a A;
+ type IntoIter = Iter<'a, A>;
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter()
+ }
+}
+
+impl<A: Clone> IntoIterator for Vector<A> {
+ type Item = A;
+ type IntoIter = ConsumingIter<A>;
+ fn into_iter(self) -> Self::IntoIter {
+ ConsumingIter::new(self)
+ }
+}
+
+impl<A: Clone> FromIterator<A> for Vector<A> {
+ /// Create a vector from an iterator.
+ ///
+ /// Time: O(n)
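+ ///
+ /// A minimal usage sketch, assuming `Vector` is re-exported at the crate
+ /// root as `im_rc::Vector`:
+ ///
+ /// ```
+ /// # use im_rc::Vector;
+ /// let vec: Vector<i32> = (1..=5).collect();
+ /// assert_eq!(5, vec.len());
+ /// assert_eq!(Some(&1), vec.get(0));
+ /// ```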
+ fn from_iter<I>(iter: I) -> Self
+ where
+ I: IntoIterator<Item = A>,
+ {
+ let mut seq = Self::new();
+ for item in iter {
+ seq.push_back(item)
+ }
+ seq
+ }
+}
+
+impl<'s, 'a, A, OA> From<&'s Vector<&'a A>> for Vector<OA>
+where
+ A: ToOwned<Owned = OA>,
+ OA: Borrow<A> + Clone,
+{
+ fn from(vec: &Vector<&A>) -> Self {
+ vec.iter().map(|a| (*a).to_owned()).collect()
+ }
+}
+
+impl<'a, A: Clone> From<&'a [A]> for Vector<A> {
+ fn from(slice: &[A]) -> Self {
+ slice.iter().cloned().collect()
+ }
+}
+
+impl<A: Clone> From<Vec<A>> for Vector<A> {
+ /// Create a vector from a [`std::vec::Vec`][vec].
+ ///
+ /// Time: O(n)
+ ///
+ /// [vec]: https://doc.rust-lang.org/std/vec/struct.Vec.html
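+ ///
+ /// A minimal usage sketch, assuming `Vector` is re-exported at the crate
+ /// root as `im_rc::Vector`:
+ ///
+ /// ```
+ /// # use im_rc::Vector;
+ /// let vec = Vector::from(vec![1, 2, 3]);
+ /// assert_eq!(Some(&2), vec.get(1));
+ /// ```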
+ fn from(vec: Vec<A>) -> Self {
+ vec.into_iter().collect()
+ }
+}
+
+impl<'a, A: Clone> From<&'a Vec<A>> for Vector<A> {
+ /// Create a vector from a [`std::vec::Vec`][vec].
+ ///
+ /// Time: O(n)
+ ///
+ /// [vec]: https://doc.rust-lang.org/std/vec/struct.Vec.html
+ fn from(vec: &Vec<A>) -> Self {
+ vec.iter().cloned().collect()
+ }
+}
+
+// Iterators
+
+/// An iterator over vectors with values of type `A`.
+///
+/// To obtain one, use [`Vector::iter()`][iter].
+///
+/// [iter]: struct.Vector.html#method.iter
+pub struct Iter<'a, A> {
+ focus: Focus<'a, A>,
+ front_index: usize,
+ back_index: usize,
+}
+
+impl<'a, A: Clone> Iter<'a, A> {
+ fn new(seq: &'a Vector<A>) -> Self {
+ Iter {
+ focus: seq.focus(),
+ front_index: 0,
+ back_index: seq.len(),
+ }
+ }
+
+ fn from_focus(focus: Focus<'a, A>) -> Self {
+ Iter {
+ front_index: 0,
+ back_index: focus.len(),
+ focus,
+ }
+ }
+}
+
+impl<'a, A: Clone> Iterator for Iter<'a, A> {
+ type Item = &'a A;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ #[allow(unsafe_code)]
+ let focus: &'a mut Focus<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ let value = focus.get(self.front_index);
+ self.front_index += 1;
+ value
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let remaining = self.back_index - self.front_index;
+ (remaining, Some(remaining))
+ }
+}
+
+impl<'a, A: Clone> DoubleEndedIterator for Iter<'a, A> {
+ /// Advance the iterator from the back and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ self.back_index -= 1;
+ #[allow(unsafe_code)]
+ let focus: &'a mut Focus<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ focus.get(self.back_index)
+ }
+}
+
+impl<'a, A: Clone> ExactSizeIterator for Iter<'a, A> {}
+
+impl<'a, A: Clone> FusedIterator for Iter<'a, A> {}
+
+/// A mutable iterator over vectors with values of type `A`.
+///
+/// To obtain one, use [`Vector::iter_mut()`][iter_mut].
+///
+/// [iter_mut]: struct.Vector.html#method.iter_mut
+pub struct IterMut<'a, A> {
+ focus: FocusMut<'a, A>,
+ front_index: usize,
+ back_index: usize,
+}
+
+impl<'a, A> IterMut<'a, A>
+where
+ A: Clone,
+{
+ fn new(seq: &'a mut Vector<A>) -> Self {
+ let focus = seq.focus_mut();
+ let len = focus.len();
+ IterMut {
+ focus,
+ front_index: 0,
+ back_index: len,
+ }
+ }
+
+ fn from_focus(focus: FocusMut<'a, A>) -> Self {
+ IterMut {
+ front_index: 0,
+ back_index: focus.len(),
+ focus,
+ }
+ }
+}
+
+impl<'a, A> Iterator for IterMut<'a, A>
+where
+ A: 'a + Clone,
+{
+ type Item = &'a mut A;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ #[allow(unsafe_code)]
+ let focus: &'a mut FocusMut<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ let value = focus.get_mut(self.front_index);
+ self.front_index += 1;
+ value
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let remaining = self.back_index - self.front_index;
+ (remaining, Some(remaining))
+ }
+}
+
+impl<'a, A> DoubleEndedIterator for IterMut<'a, A>
+where
+ A: 'a + Clone,
+{
+ /// Advance the iterator from the back and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ self.back_index -= 1;
+ #[allow(unsafe_code)]
+ let focus: &'a mut FocusMut<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ focus.get_mut(self.back_index)
+ }
+}
+
+impl<'a, A: Clone> ExactSizeIterator for IterMut<'a, A> {}
+
+impl<'a, A: Clone> FusedIterator for IterMut<'a, A> {}
+
+/// A consuming iterator over vectors with values of type `A`.
+pub struct ConsumingIter<A> {
+ vector: Vector<A>,
+}
+
+impl<A: Clone> ConsumingIter<A> {
+ fn new(vector: Vector<A>) -> Self {
+ Self { vector }
+ }
+}
+
+impl<A: Clone> Iterator for ConsumingIter<A> {
+ type Item = A;
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ self.vector.pop_front()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.vector.len();
+ (len, Some(len))
+ }
+}
+
+impl<A: Clone> DoubleEndedIterator for ConsumingIter<A> {
+ /// Remove and return an element from the back of the iterator.
+ ///
+ /// Time: O(1)*
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.vector.pop_back()
+ }
+}
+
+impl<A: Clone> ExactSizeIterator for ConsumingIter<A> {}
+
+impl<A: Clone> FusedIterator for ConsumingIter<A> {}
+
+/// An iterator over the leaf nodes of a vector.
+///
+/// To obtain one, use [`Vector::leaves()`][leaves].
+///
+/// [leaves]: struct.Vector.html#method.leaves
+pub struct Chunks<'a, A> {
+ focus: Focus<'a, A>,
+ front_index: usize,
+ back_index: usize,
+}
+
+impl<'a, A: Clone> Chunks<'a, A> {
+ fn new(seq: &'a Vector<A>) -> Self {
+ Chunks {
+ focus: seq.focus(),
+ front_index: 0,
+ back_index: seq.len(),
+ }
+ }
+}
+
+impl<'a, A: Clone> Iterator for Chunks<'a, A> {
+ type Item = &'a [A];
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ #[allow(unsafe_code)]
+ let focus: &'a mut Focus<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ let (range, value) = focus.chunk_at(self.front_index);
+ self.front_index = range.end;
+ Some(value)
+ }
+}
+
+impl<'a, A: Clone> DoubleEndedIterator for Chunks<'a, A> {
+ /// Advance the iterator from the back and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ self.back_index -= 1;
+ #[allow(unsafe_code)]
+ let focus: &'a mut Focus<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ let (range, value) = focus.chunk_at(self.back_index);
+ self.back_index = range.start;
+ Some(value)
+ }
+}
+
+impl<'a, A: Clone> FusedIterator for Chunks<'a, A> {}
+
+/// A mutable iterator over the leaf nodes of a vector.
+///
+/// To obtain one, use [`Vector::leaves_mut()`][leaves_mut].
+///
+/// [leaves_mut]: struct.Vector.html#method.leaves_mut
+pub struct ChunksMut<'a, A> {
+ focus: FocusMut<'a, A>,
+ front_index: usize,
+ back_index: usize,
+}
+
+impl<'a, A: Clone> ChunksMut<'a, A> {
+ fn new(seq: &'a mut Vector<A>) -> Self {
+ let len = seq.len();
+ ChunksMut {
+ focus: seq.focus_mut(),
+ front_index: 0,
+ back_index: len,
+ }
+ }
+}
+
+impl<'a, A: Clone> Iterator for ChunksMut<'a, A> {
+ type Item = &'a mut [A];
+
+ /// Advance the iterator and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ #[allow(unsafe_code)]
+ let focus: &'a mut FocusMut<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ let (range, value) = focus.chunk_at(self.front_index);
+ self.front_index = range.end;
+ Some(value)
+ }
+}
+
+impl<'a, A: Clone> DoubleEndedIterator for ChunksMut<'a, A> {
+ /// Advance the iterator from the back and return the next value.
+ ///
+ /// Time: O(1)*
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.front_index >= self.back_index {
+ return None;
+ }
+ self.back_index -= 1;
+ #[allow(unsafe_code)]
+ let focus: &'a mut FocusMut<'a, A> = unsafe { &mut *(&mut self.focus as *mut _) };
+ let (range, value) = focus.chunk_at(self.back_index);
+ self.back_index = range.start;
+ Some(value)
+ }
+}
+
+impl<'a, A: Clone> FusedIterator for ChunksMut<'a, A> {}
+
+// Proptest
+#[cfg(any(test, feature = "proptest"))]
+#[doc(hidden)]
+pub mod proptest {
+ #[deprecated(
+ since = "14.3.0",
+ note = "proptest strategies have moved to im::proptest"
+ )]
+ pub use crate::proptest::vector;
+}
+
+// Tests
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use crate::proptest::vector;
+ use ::proptest::collection::vec;
+ use ::proptest::num::{i32, usize};
+ use ::proptest::proptest;
+
+ #[test]
+ fn macro_allows_trailing_comma() {
+ let vec1 = vector![1, 2, 3];
+ let vec2 = vector![1, 2, 3,];
+ assert_eq!(vec1, vec2);
+ }
+
+ #[test]
+ fn indexing() {
+ let mut vec = vector![0, 1, 2, 3, 4, 5];
+ vec.push_front(0);
+ assert_eq!(0, *vec.get(0).unwrap());
+ assert_eq!(0, vec[0]);
+ }
+
+ #[test]
+ fn large_vector_focus() {
+ let input = (0..100_000).collect::<Vector<_>>();
+ let vec = input.clone();
+ let mut sum: i64 = 0;
+ let mut focus = vec.focus();
+ for i in 0..input.len() {
+ sum += *focus.index(i);
+ }
+ let expected: i64 = (0..100_000).sum();
+ assert_eq!(expected, sum);
+ }
+
+ #[test]
+ fn large_vector_focus_mut() {
+ let input = (0..100_000).collect::<Vector<_>>();
+ let mut vec = input.clone();
+ {
+ let mut focus = vec.focus_mut();
+ for i in 0..input.len() {
+ let p = focus.index_mut(i);
+ *p += 1;
+ }
+ }
+ let expected: Vector<i32> = input.into_iter().map(|i| i + 1).collect();
+ assert_eq!(expected, vec);
+ }
+
+ #[test]
+ fn issue_55_fwd() {
+ let mut l = Vector::new();
+ for i in 0..4098 {
+ l.append(Vector::unit(i));
+ }
+ l.append(Vector::unit(4098));
+ assert_eq!(Some(&4097), l.get(4097));
+ assert_eq!(Some(&4096), l.get(4096));
+ }
+
+ #[test]
+ fn issue_55_back() {
+ let mut l = Vector::unit(0);
+ for i in 0..4099 {
+ let mut tmp = Vector::unit(i + 1);
+ tmp.append(l);
+ l = tmp;
+ }
+ assert_eq!(Some(&4098), l.get(1));
+ assert_eq!(Some(&4097), l.get(2));
+ let len = l.len();
+ l.slice(2..len);
+ }
+
+ #[test]
+ fn issue_55_append() {
+ let mut vec1 = (0..92).collect::<Vector<_>>();
+ let vec2 = (0..165).collect::<Vector<_>>();
+ vec1.append(vec2);
+ }
+
+ #[test]
+ fn issue_70() {
+ let mut x = Vector::new();
+ for _ in 0..262 {
+ x.push_back(0);
+ }
+ for _ in 0..97 {
+ x.pop_front();
+ }
+ for &offset in &[160, 163, 160] {
+ x.remove(offset);
+ }
+ for _ in 0..64 {
+ x.push_back(0);
+ }
+ // At this point middle contains three chunks of size 64, 64 and 1
+ // respectively. Previously the next `push_back()` would append another
+ // zero-sized chunk to middle even though there is enough space left.
+ match x.vector {
+ VectorInner::Full(_, ref tree) => {
+ assert_eq!(129, tree.middle.len());
+ assert_eq!(3, tree.middle.number_of_children());
+ }
+ _ => unreachable!(),
+ }
+ x.push_back(0);
+ match x.vector {
+ VectorInner::Full(_, ref tree) => {
+ assert_eq!(131, tree.middle.len());
+ assert_eq!(3, tree.middle.number_of_children())
+ }
+ _ => unreachable!(),
+ }
+ for _ in 0..64 {
+ x.push_back(0);
+ }
+ for _ in x.iter() {}
+ }
+
+ #[test]
+ fn issue_67() {
+ let mut l = Vector::unit(4100);
+ for i in (0..4099).rev() {
+ let mut tmp = Vector::unit(i);
+ tmp.append(l);
+ l = tmp;
+ }
+ assert_eq!(4100, l.len());
+ let len = l.len();
+ let tail = l.slice(1..len);
+ assert_eq!(1, l.len());
+ assert_eq!(4099, tail.len());
+ assert_eq!(Some(&0), l.get(0));
+ assert_eq!(Some(&1), tail.get(0));
+ }
+
+ #[test]
+ fn issue_74_simple_size() {
+ use crate::nodes::rrb::NODE_SIZE;
+ let mut x = Vector::new();
+ for _ in 0..(CHUNK_SIZE
+ * (1 // inner_f
+ + (2 * NODE_SIZE) // middle: two full Entry::Nodes (4096 elements each)
+ + 1 // inner_b
+ + 1)) // outer_b
+ {
+ x.push_back(0u32);
+ }
+ let middle_first_node_start = CHUNK_SIZE;
+ let middle_second_node_start = middle_first_node_start + NODE_SIZE * CHUNK_SIZE;
+ // This reduces the size of the second node to 4095.
+ x.remove(middle_second_node_start);
+ // As outer_b is full, this will cause inner_b (length 64) to be pushed
+ // to middle. The first element will be merged into the second node, the
+ // remaining 63 elements will end up in a new node.
+ x.push_back(0u32);
+ match x.vector {
+ VectorInner::Full(_, tree) => {
+ assert_eq!(3, tree.middle.number_of_children());
+ assert_eq!(
+ 2 * NODE_SIZE * CHUNK_SIZE + CHUNK_SIZE - 1,
+ tree.middle.len()
+ );
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ #[test]
+ fn issue_77() {
+ let mut x = Vector::new();
+ for _ in 0..44 {
+ x.push_back(0);
+ }
+ for _ in 0..20 {
+ x.insert(0, 0);
+ }
+ x.insert(1, 0);
+ for _ in 0..441 {
+ x.push_back(0);
+ }
+ for _ in 0..58 {
+ x.insert(0, 0);
+ }
+ x.insert(514, 0);
+ for _ in 0..73 {
+ x.push_back(0);
+ }
+ for _ in 0..10 {
+ x.insert(0, 0);
+ }
+ x.insert(514, 0);
+ }
+
+ #[test]
+ fn issue_105() {
+ let mut v = Vector::new();
+
+ for i in 0..270_000 {
+ v.push_front(i);
+ }
+
+ while !v.is_empty() {
+ v = v.take(v.len() - 1);
+ }
+ }
+
+ #[test]
+ fn issue_107_split_off_causes_overflow() {
+ let mut vec = (0..4289).collect::<Vector<_>>();
+ let mut control = (0..4289).collect::<Vec<_>>();
+ let chunk = 64;
+
+ while vec.len() >= chunk {
+ vec = vec.split_off(chunk);
+ control = control.split_off(chunk);
+ assert_eq!(vec.len(), control.len());
+ assert_eq!(control, vec.iter().cloned().collect::<Vec<_>>());
+ }
+ }
+
+ #[test]
+ fn collect_crash() {
+ let _vector: Vector<i32> = (0..5953).collect();
+ // let _vector: Vector<i32> = (0..16384).collect();
+ }
+
+ #[test]
+ fn issue_116() {
+ let vec = (0..300).collect::<Vector<_>>();
+ let rev_vec: Vector<u32> = vec.clone().into_iter().rev().collect();
+ assert_eq!(vec.len(), rev_vec.len());
+ }
+
+ #[test]
+ fn issue_131() {
+ let smol = std::iter::repeat(42).take(64).collect::<Vector<_>>();
+ let mut smol2 = smol.clone();
+ assert!(smol.ptr_eq(&smol2));
+ smol2.set(63, 420);
+ assert!(!smol.ptr_eq(&smol2));
+
+ let huge = std::iter::repeat(42).take(65).collect::<Vector<_>>();
+ let mut huge2 = huge.clone();
+ assert!(huge.ptr_eq(&huge2));
+ huge2.set(63, 420);
+ assert!(!huge.ptr_eq(&huge2));
+ }
+
+ #[test]
+ fn ptr_eq() {
+ for len in 32..256 {
+ let input = std::iter::repeat(42).take(len).collect::<Vector<_>>();
+ let mut inp2 = input.clone();
+ assert!(input.ptr_eq(&inp2));
+ inp2.set(len - 1, 98);
+ assert_ne!(inp2.get(len - 1), input.get(len - 1));
+ assert!(!input.ptr_eq(&inp2));
+ }
+ }
+
+ proptest! {
+ #[test]
+ fn iter(ref vec in vec(i32::ANY, 0..1000)) {
+ let seq: Vector<i32> = vec.iter().cloned().collect::<Vector<_>>();
+ for (index, item) in seq.iter().enumerate() {
+ assert_eq!(&vec[index], item);
+ }
+ assert_eq!(vec.len(), seq.len());
+ }
+
+ #[test]
+ fn push_front_mut(ref input in vec(i32::ANY, 0..1000)) {
+ let mut vector = Vector::new();
+ for (count, value) in input.iter().cloned().enumerate() {
+ assert_eq!(count, vector.len());
+ vector.push_front(value);
+ assert_eq!(count + 1, vector.len());
+ }
+ let input2 = input.iter().rev().cloned().collect::<Vec<_>>();
+ assert_eq!(input2, vector.iter().cloned().collect::<Vec<_>>());
+ }
+
+ #[test]
+ fn push_back_mut(ref input in vec(i32::ANY, 0..1000)) {
+ let mut vector = Vector::new();
+ for (count, value) in input.iter().cloned().enumerate() {
+ assert_eq!(count, vector.len());
+ vector.push_back(value);
+ assert_eq!(count + 1, vector.len());
+ }
+ assert_eq!(input, &vector.iter().cloned().collect::<Vec<_>>());
+ }
+
+ #[test]
+ fn pop_back_mut(ref input in vec(i32::ANY, 0..1000)) {
+ let mut vector = input.iter().cloned().collect::<Vector<_>>();
+ assert_eq!(input.len(), vector.len());
+ for (index, value) in input.iter().cloned().enumerate().rev() {
+ match vector.pop_back() {
+ None => panic!("vector emptied unexpectedly"),
+ Some(item) => {
+ assert_eq!(index, vector.len());
+ assert_eq!(value, item);
+ }
+ }
+ }
+ assert_eq!(0, vector.len());
+ }
+
+ #[test]
+ fn pop_front_mut(ref input in vec(i32::ANY, 0..1000)) {
+ let mut vector = input.iter().cloned().collect::<Vector<_>>();
+ assert_eq!(input.len(), vector.len());
+ for (index, value) in input.iter().cloned().rev().enumerate().rev() {
+ match vector.pop_front() {
+ None => panic!("vector emptied unexpectedly"),
+ Some(item) => {
+ assert_eq!(index, vector.len());
+ assert_eq!(value, item);
+ }
+ }
+ }
+ assert_eq!(0, vector.len());
+ }
+
+ // #[test]
+ // fn push_and_pop(ref input in vec(i32::ANY, 0..1000)) {
+ // let mut vector = Vector::new();
+ // for (count, value) in input.iter().cloned().enumerate() {
+ // assert_eq!(count, vector.len());
+ // vector.push_back(value);
+ // assert_eq!(count + 1, vector.len());
+ // }
+ // for (index, value) in input.iter().cloned().rev().enumerate().rev() {
+ // match vector.pop_front() {
+ // None => panic!("vector emptied unexpectedly"),
+ // Some(item) => {
+ // assert_eq!(index, vector.len());
+ // assert_eq!(value, item);
+ // }
+ // }
+ // }
+ // assert_eq!(true, vector.is_empty());
+ // }
+
+ #[test]
+ fn split(ref vec in vec(i32::ANY, 1..2000), split_pos in usize::ANY) {
+ let split_index = split_pos % (vec.len() + 1);
+ let mut left = vec.iter().cloned().collect::<Vector<_>>();
+ let right = left.split_off(split_index);
+ assert_eq!(left.len(), split_index);
+ assert_eq!(right.len(), vec.len() - split_index);
+ for (index, item) in left.iter().enumerate() {
+ assert_eq!(& vec[index], item);
+ }
+ for (index, item) in right.iter().enumerate() {
+ assert_eq!(&vec[split_index + index], item);
+ }
+ }
+
+ #[test]
+ fn append(ref vec1 in vec(i32::ANY, 0..1000), ref vec2 in vec(i32::ANY, 0..1000)) {
+ let mut seq1 = vec1.iter().cloned().collect::<Vector<_>>();
+ let seq2 = vec2.iter().cloned().collect::<Vector<_>>();
+ assert_eq!(seq1.len(), vec1.len());
+ assert_eq!(seq2.len(), vec2.len());
+ seq1.append(seq2);
+ let mut vec = vec1.clone();
+ vec.extend(vec2);
+ assert_eq!(seq1.len(), vec.len());
+ for (index, item) in seq1.into_iter().enumerate() {
+ assert_eq!(vec[index], item);
+ }
+ }
+
+ #[test]
+ fn iter_mut(ref input in vector(i32::ANY, 0..10000)) {
+ let mut vec = input.clone();
+ {
+ for p in vec.iter_mut() {
+ *p = p.overflowing_add(1).0;
+ }
+ }
+ let expected: Vector<i32> = input.clone().into_iter().map(|i| i.overflowing_add(1).0).collect();
+ assert_eq!(expected, vec);
+ }
+
+ #[test]
+ fn focus(ref input in vector(i32::ANY, 0..10000)) {
+ let mut vec = input.clone();
+ {
+ let mut focus = vec.focus_mut();
+ for i in 0..input.len() {
+ let p = focus.index_mut(i);
+ *p = p.overflowing_add(1).0;
+ }
+ }
+ let expected: Vector<i32> = input.clone().into_iter().map(|i| i.overflowing_add(1).0).collect();
+ assert_eq!(expected, vec);
+ }
+
+ #[test]
+ fn focus_mut_split(ref input in vector(i32::ANY, 0..10000)) {
+ let mut vec = input.clone();
+
+ fn split_down(focus: FocusMut<'_, i32>) {
+ let len = focus.len();
+ if len < 8 {
+ for p in focus {
+ *p = p.overflowing_add(1).0;
+ }
+ } else {
+ let (left, right) = focus.split_at(len / 2);
+ split_down(left);
+ split_down(right);
+ }
+ }
+
+ split_down(vec.focus_mut());
+
+ let expected: Vector<i32> = input.clone().into_iter().map(|i| i.overflowing_add(1).0).collect();
+ assert_eq!(expected, vec);
+ }
+
+ #[test]
+ fn chunks(ref input in vector(i32::ANY, 0..10000)) {
+ let output: Vector<_> = input.leaves().flatten().cloned().collect();
+ assert_eq!(input, &output);
+ let rev_in: Vector<_> = input.iter().rev().cloned().collect();
+ let rev_out: Vector<_> = input.leaves().rev().map(|c| c.iter().rev()).flatten().cloned().collect();
+ assert_eq!(rev_in, rev_out);
+ }
+
+ #[test]
+ fn chunks_mut(ref mut input_src in vector(i32::ANY, 0..10000)) {
+ let mut input = input_src.clone();
+ #[allow(clippy::map_clone)]
+ let output: Vector<_> = input.leaves_mut().flatten().map(|v| *v).collect();
+ assert_eq!(input, output);
+ let rev_in: Vector<_> = input.iter().rev().cloned().collect();
+ let rev_out: Vector<_> = input.leaves_mut().rev().map(|c| c.iter().rev()).flatten().cloned().collect();
+ assert_eq!(rev_in, rev_out);
+ }
+
+ // The following two tests are very slow and there are unit tests above
+ // which test for regression of issue #55. It would still be good to
+ // run them occasionally.
+
+ // #[test]
+ // fn issue55_back(count in 0..10000, slice_at in usize::ANY) {
+ // let count = count as usize;
+ // let slice_at = slice_at % count;
+ // let mut l = Vector::unit(0);
+ // for _ in 0..count {
+ // let mut tmp = Vector::unit(0);
+ // tmp.append(l);
+ // l = tmp;
+ // }
+ // let len = l.len();
+ // l.slice(slice_at..len);
+ // }
+
+ // #[test]
+ // fn issue55_fwd(count in 0..10000, slice_at in usize::ANY) {
+ // let count = count as usize;
+ // let slice_at = slice_at % count;
+ // let mut l = Vector::new();
+ // for i in 0..count {
+ // l.append(Vector::unit(i));
+ // }
+ // assert_eq!(Some(&slice_at), l.get(slice_at));
+ // }
+ }
+}
diff --git a/vendor/im-rc/src/vector/pool.rs b/vendor/im-rc/src/vector/pool.rs
new file mode 100644
index 000000000..4d4edae6b
--- /dev/null
+++ b/vendor/im-rc/src/vector/pool.rs
@@ -0,0 +1,74 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+use crate::config::POOL_SIZE;
+use crate::nodes::chunk::Chunk;
+use crate::nodes::rrb::Node;
+use crate::util::Pool;
+
+/// A memory pool for `Vector`.
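+///
+/// A minimal construction sketch; the public re-export path
+/// `im_rc::vector::RRBPool` is assumed here:
+///
+/// ```
+/// use im_rc::vector::RRBPool;
+///
+/// // Preallocate up to 1024 chunks in each of the three subpools.
+/// let pool: RRBPool<u64> = RRBPool::new(1024);
+/// pool.fill();
+/// ```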
+pub struct RRBPool<A> {
+ pub(crate) node_pool: Pool<Chunk<Node<A>>>,
+ pub(crate) value_pool: Pool<Chunk<A>>,
+ pub(crate) size_pool: Pool<Chunk<usize>>,
+}
+
+impl<A> RRBPool<A> {
+ /// Create a new memory pool with the given size.
+ pub fn new(size: usize) -> Self {
+ Self::with_sizes(size, size, size)
+ }
+
+ /// Create a new memory pool with the given sizes for each subpool.
+ pub fn with_sizes(
+ node_pool_size: usize,
+ leaf_pool_size: usize,
+ size_table_pool_size: usize,
+ ) -> Self {
+ Self {
+ node_pool: Pool::new(node_pool_size),
+ value_pool: Pool::new(leaf_pool_size),
+ size_pool: Pool::new(size_table_pool_size),
+ }
+ }
+
+ /// Fill the memory pool with preallocated chunks.
+ pub fn fill(&self) {
+ self.node_pool.fill();
+ self.value_pool.fill();
+ self.size_pool.fill();
+ }
+
+ /// Get the size of the node subpool.
+ pub fn node_pool_size(&self) -> usize {
+ self.node_pool.get_pool_size()
+ }
+
+ /// Get the size of the leaf node subpool.
+ pub fn leaf_pool_size(&self) -> usize {
+ self.value_pool.get_pool_size()
+ }
+
+ /// Get the size of the size table subpool.
+ pub fn size_table_pool_size(&self) -> usize {
+ self.size_pool.get_pool_size()
+ }
+}
+
+impl<A> Default for RRBPool<A> {
+ /// Construct a pool with a reasonable default pool size.
+ fn default() -> Self {
+ Self::new(POOL_SIZE)
+ }
+}
+
+impl<A> Clone for RRBPool<A> {
+ fn clone(&self) -> Self {
+ Self {
+ node_pool: self.node_pool.clone(),
+ value_pool: self.value_pool.clone(),
+ size_pool: self.size_pool.clone(),
+ }
+ }
+}
diff --git a/vendor/im-rc/src/vector/rayon.rs b/vendor/im-rc/src/vector/rayon.rs
new file mode 100644
index 000000000..054620dc0
--- /dev/null
+++ b/vendor/im-rc/src/vector/rayon.rs
@@ -0,0 +1,209 @@
+//! Parallel iterators.
+//!
+//! These are only available when using the `rayon` feature flag.
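+//!
+//! A minimal usage sketch, assuming the `rayon` feature is enabled so that
+//! the `rayon` crate and these impls are available:
+//!
+//! ```
+//! use im_rc::Vector;
+//! use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
+//!
+//! let vec: Vector<i32> = (0..1000).collect();
+//! // A parallel sum gives the same result as a serial one.
+//! let total: i32 = vec.par_iter().sum();
+//! assert_eq!((0..1000).sum::<i32>(), total);
+//! ```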
+
+use super::*;
+use ::rayon::iter::plumbing::{bridge, Consumer, Producer, ProducerCallback, UnindexedConsumer};
+use ::rayon::iter::{
+ IndexedParallelIterator, IntoParallelRefIterator, IntoParallelRefMutIterator, ParallelIterator,
+};
+
+impl<'a, A> IntoParallelRefIterator<'a> for Vector<A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ type Item = &'a A;
+ type Iter = ParIter<'a, A>;
+
+ fn par_iter(&'a self) -> Self::Iter {
+ ParIter {
+ focus: self.focus(),
+ }
+ }
+}
+
+impl<'a, A> IntoParallelRefMutIterator<'a> for Vector<A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ type Item = &'a mut A;
+ type Iter = ParIterMut<'a, A>;
+
+ fn par_iter_mut(&'a mut self) -> Self::Iter {
+ ParIterMut {
+ focus: self.focus_mut(),
+ }
+ }
+}
+
+/// A parallel iterator for [`Vector`][Vector].
+///
+/// [Vector]: ../struct.Vector.html
+pub struct ParIter<'a, A>
+where
+ A: Clone + Send + Sync,
+{
+ focus: Focus<'a, A>,
+}
+
+impl<'a, A> ParallelIterator for ParIter<'a, A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ type Item = &'a A;
+
+ fn drive_unindexed<C>(self, consumer: C) -> C::Result
+ where
+ C: UnindexedConsumer<Self::Item>,
+ {
+ bridge(self, consumer)
+ }
+}
+
+impl<'a, A> IndexedParallelIterator for ParIter<'a, A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ fn drive<C>(self, consumer: C) -> C::Result
+ where
+ C: Consumer<Self::Item>,
+ {
+ bridge(self, consumer)
+ }
+
+ fn len(&self) -> usize {
+ self.focus.len()
+ }
+
+ fn with_producer<CB>(self, callback: CB) -> CB::Output
+ where
+ CB: ProducerCallback<Self::Item>,
+ {
+ callback.callback(VectorProducer { focus: self.focus })
+ }
+}
+
+/// A mutable parallel iterator for [`Vector`][Vector].
+///
+/// [Vector]: ../struct.Vector.html
+pub struct ParIterMut<'a, A>
+where
+ A: Clone + Send + Sync,
+{
+ focus: FocusMut<'a, A>,
+}
+
+impl<'a, A> ParallelIterator for ParIterMut<'a, A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ type Item = &'a mut A;
+
+ fn drive_unindexed<C>(self, consumer: C) -> C::Result
+ where
+ C: UnindexedConsumer<Self::Item>,
+ {
+ bridge(self, consumer)
+ }
+}
+
+impl<'a, A> IndexedParallelIterator for ParIterMut<'a, A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ fn drive<C>(self, consumer: C) -> C::Result
+ where
+ C: Consumer<Self::Item>,
+ {
+ bridge(self, consumer)
+ }
+
+ fn len(&self) -> usize {
+ self.focus.len()
+ }
+
+ fn with_producer<CB>(self, callback: CB) -> CB::Output
+ where
+ CB: ProducerCallback<Self::Item>,
+ {
+ callback.callback(VectorMutProducer { focus: self.focus })
+ }
+}
+
+struct VectorProducer<'a, A>
+where
+ A: Clone + Send + Sync,
+{
+ focus: Focus<'a, A>,
+}
+
+impl<'a, A> Producer for VectorProducer<'a, A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ type Item = &'a A;
+ type IntoIter = Iter<'a, A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.focus.into_iter()
+ }
+
+ fn split_at(self, index: usize) -> (Self, Self) {
+ let (left, right) = self.focus.split_at(index);
+ (
+ VectorProducer { focus: left },
+ VectorProducer { focus: right },
+ )
+ }
+}
+
+struct VectorMutProducer<'a, A>
+where
+ A: Clone + Send + Sync,
+{
+ focus: FocusMut<'a, A>,
+}
+
+impl<'a, A> Producer for VectorMutProducer<'a, A>
+where
+ A: Clone + Send + Sync + 'a,
+{
+ type Item = &'a mut A;
+ type IntoIter = IterMut<'a, A>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.focus.into_iter()
+ }
+
+ fn split_at(self, index: usize) -> (Self, Self) {
+ let (left, right) = self.focus.split_at(index);
+ (
+ VectorMutProducer { focus: left },
+ VectorMutProducer { focus: right },
+ )
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::super::*;
+ use super::proptest::vector;
+ use ::proptest::num::i32;
+ use ::proptest::proptest;
+ use ::rayon::iter::{IntoParallelRefIterator, IntoParallelRefMutIterator, ParallelIterator};
+
+ proptest! {
+ #[test]
+ fn par_iter(ref mut input in vector(i32::ANY, 0..10000)) {
+ assert_eq!(input.iter().max(), input.par_iter().max())
+ }
+
+ #[test]
+ fn par_mut_iter(ref mut input in vector(i32::ANY, 0..10000)) {
+ let mut vec = input.clone();
+ vec.par_iter_mut().for_each(|i| *i = i.overflowing_add(1).0);
+ let expected: Vector<i32> = input.clone().into_iter().map(|i| i.overflowing_add(1).0).collect();
+ assert_eq!(expected, vec);
+ }
+ }
+}