diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:19:50 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:19:50 +0000 |
commit | 2e00214b3efbdfeefaa0fe9e8b8fd519de7adc35 (patch) | |
tree | d325add32978dbdc1db975a438b3a77d571b1ab8 /vendor/elsa/src | |
parent | Releasing progress-linux version 1.68.2+dfsg1-1~progress7.99u1. (diff) | |
download | rustc-2e00214b3efbdfeefaa0fe9e8b8fd519de7adc35.tar.xz rustc-2e00214b3efbdfeefaa0fe9e8b8fd519de7adc35.zip |
Merging upstream version 1.69.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/elsa/src')
-rw-r--r-- | vendor/elsa/src/index_map.rs | 215 | ||||
-rw-r--r-- | vendor/elsa/src/index_set.rs | 180 | ||||
-rw-r--r-- | vendor/elsa/src/lib.rs | 29 | ||||
-rw-r--r-- | vendor/elsa/src/map.rs | 451 | ||||
-rw-r--r-- | vendor/elsa/src/sync.rs | 624 | ||||
-rw-r--r-- | vendor/elsa/src/vec.rs | 347 |
6 files changed, 1846 insertions, 0 deletions
diff --git a/vendor/elsa/src/index_map.rs b/vendor/elsa/src/index_map.rs new file mode 100644 index 000000000..3c97dfbaa --- /dev/null +++ b/vendor/elsa/src/index_map.rs @@ -0,0 +1,215 @@ +use std::borrow::Borrow; +use std::cell::{Cell, UnsafeCell}; +use std::collections::hash_map::RandomState; +use std::hash::{BuildHasher, Hash}; +use std::iter::FromIterator; +use std::ops::Index; + +use indexmap::IndexMap; +use stable_deref_trait::StableDeref; + +/// Append-only version of `indexmap::IndexMap` where +/// insertion does not require mutable access +pub struct FrozenIndexMap<K, V, S = RandomState> { + map: UnsafeCell<IndexMap<K, V, S>>, + /// Eq/Hash implementations can have side-effects, and using Rc it is possible + /// for FrozenIndexMap::insert to be called on a key that itself contains the same + /// `FrozenIndexMap`, whose `eq` implementation also calls FrozenIndexMap::insert + /// + /// We use this `in_use` flag to guard against any reentrancy. + in_use: Cell<bool>, +} + +// safety: UnsafeCell implies !Sync + +impl<K: Eq + Hash, V> FrozenIndexMap<K, V> { + pub fn new() -> Self { + Self { + map: UnsafeCell::new(Default::default()), + in_use: Cell::new(false), + } + } +} + +impl<K: Eq + Hash, V: StableDeref, S: BuildHasher> FrozenIndexMap<K, V, S> { + // these should never return &K or &V + // these should never delete any entries + pub fn insert(&self, k: K, v: V) -> &V::Target { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + &*(*map).entry(k).or_insert(v) + }; + self.in_use.set(false); + ret + } + + // these should never return &K or &V + // these should never delete any entries + pub fn insert_full(&self, k: K, v: V) -> (usize, &V::Target) { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + let entry = (*map).entry(k); + let index = entry.index(); + (index, &**entry.or_insert(v)) + }; + self.in_use.set(false); + ret + } + + /// Returns a 
reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenIndexMap; + /// + /// let map = FrozenIndexMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// ``` + pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> + where + K: Borrow<Q>, + Q: Hash + Eq, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get(k).map(|x| &**x) + }; + self.in_use.set(false); + ret + } + + pub fn get_index(&self, index: usize) -> Option<(&K::Target, &V::Target)> + where + K: StableDeref, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get_index(index).map(|(k, v)| (&**k, &**v)) + }; + self.in_use.set(false); + ret + } + + /// Applies a function to the owner of the value corresponding to the key (if any). + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. 
+ /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenIndexMap; + /// + /// let map = FrozenIndexMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a"))); + /// assert_eq!(map.map_get(&2, Clone::clone), None); + /// ``` + pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T> + where + K: Borrow<Q>, + Q: Hash + Eq, + F: FnOnce(&V) -> T, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get(k).map(f) + }; + self.in_use.set(false); + ret + } + + pub fn into_map(self) -> IndexMap<K, V, S> { + self.map.into_inner() + } + + /// Get mutable access to the underlying [`IndexMap`]. + /// + /// This is safe, as it requires a `&mut self`, ensuring nothing is using + /// the 'frozen' contents. + pub fn as_mut(&mut self) -> &mut IndexMap<K, V, S> { + unsafe { &mut *self.map.get() } + } + + /// Returns true if the map contains no elements. + pub fn is_empty(&self) -> bool { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).is_empty() + }; + self.in_use.set(false); + ret + } +} + +impl<K, V, S> From<IndexMap<K, V, S>> for FrozenIndexMap<K, V, S> { + fn from(map: IndexMap<K, V, S>) -> Self { + Self { + map: UnsafeCell::new(map), + in_use: Cell::new(false), + } + } +} + +impl<Q: ?Sized, K: Eq + Hash, V: StableDeref, S: BuildHasher> Index<&Q> for FrozenIndexMap<K, V, S> + where + Q: Eq + Hash, + K: Eq + Hash + Borrow<Q>, + V: StableDeref, + S: BuildHasher +{ + type Output = V::Target; + + /// # Examples + /// + /// ``` + /// use elsa::FrozenIndexMap; + /// + /// let map = FrozenIndexMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map[&1], "a"); + /// ``` + fn index(&self, idx: &Q) -> &V::Target { + self.get(&idx) + .expect("attempted to index FrozenIndexMap with unknown key") + } +} + +impl<K: Eq + Hash, V, S: BuildHasher + Default> FromIterator<(K, V)> for 
FrozenIndexMap<K, V, S> { + fn from_iter<T>(iter: T) -> Self + where + T: IntoIterator<Item = (K, V)>, + { + let map: IndexMap<_, _, _> = iter.into_iter().collect(); + map.into() + } +} + +impl<K: Eq + Hash, V, S: Default> Default for FrozenIndexMap<K, V, S> { + fn default() -> Self { + Self { + map: UnsafeCell::new(Default::default()), + in_use: Cell::new(false), + } + } +} diff --git a/vendor/elsa/src/index_set.rs b/vendor/elsa/src/index_set.rs new file mode 100644 index 000000000..1222cde05 --- /dev/null +++ b/vendor/elsa/src/index_set.rs @@ -0,0 +1,180 @@ +use std::borrow::Borrow; +use std::cell::{Cell, UnsafeCell}; +use std::collections::hash_map::RandomState; +use std::hash::{BuildHasher, Hash}; +use std::iter::FromIterator; +use std::ops::Index; + +use indexmap::IndexSet; +use stable_deref_trait::StableDeref; + +/// Append-only version of `indexmap::IndexSet` where +/// insertion does not require mutable access +pub struct FrozenIndexSet<T, S = RandomState> { + set: UnsafeCell<IndexSet<T, S>>, + /// Eq/Hash implementations can have side-effects, and using Rc it is possible + /// for FrozenIndexSet::insert to be called on a key that itself contains the same + /// `FrozenIndexSet`, whose `eq` implementation also calls FrozenIndexSet::insert + /// + /// We use this `in_use` flag to guard against any reentrancy. 
+ in_use: Cell<bool>, +} + +// safety: UnsafeCell implies !Sync + +impl<T: Eq + Hash> FrozenIndexSet<T> { + pub fn new() -> Self { + Self::from(IndexSet::new()) + } +} + +impl<T: Eq + Hash + StableDeref, S: BuildHasher> FrozenIndexSet<T, S> { + // these should never return &T + // these should never delete any entries + pub fn insert(&self, value: T) -> &T::Target { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let set = self.set.get(); + let (index, _was_vacant) = (*set).insert_full(value); + &*(*set)[index] + }; + self.in_use.set(false); + ret + } + + // these should never return &T + // these should never delete any entries + pub fn insert_full(&self, value: T) -> (usize, &T::Target) { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let set = self.set.get(); + let (index, _was_vacant) = (*set).insert_full(value); + (index, &*(*set)[index]) + }; + self.in_use.set(false); + ret + } + + // TODO implement in case the standard Entry API gets improved + // // TODO avoid double lookup + // pub fn entry<Q: ?Sized>(&self, value: &Q) -> Entry<T, Q> + // where Q: Hash + Equivalent<T> + ToOwned<Owned = T> + // { + // assert!(!self.in_use.get()); + // self.in_use.set(true); + // unsafe { + // let set = self.set.get(); + // match (*set).get_full(value) { + // Some((index, reference)) => { + // Entry::Occupied(OccupiedEntry { + // index, + // reference, + // set: &*set, + // }) + // } + // None => { + // Entry::Vacant(VacantEntry { + // value: Cow::Borrowed(value), + // set: &*set, + // }) + // } + // } + // } + // } + + pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&T::Target> + where + T: Borrow<Q>, + Q: Hash + Eq, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let set = self.set.get(); + (*set).get(k).map(|x| &**x) + }; + self.in_use.set(false); + ret + } + + pub fn get_full<Q: ?Sized>(&self, k: &Q) -> Option<(usize, &T::Target)> + where + T: Borrow<Q>, + Q: Hash + Eq, + { + 
assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let set = self.set.get(); + (*set).get_full(k).map(|(i, x)| (i, &**x)) + }; + self.in_use.set(false); + ret + } + + pub fn get_index(&self, index: usize) -> Option<&T::Target> { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let set = self.set.get(); + (*set).get_index(index).map(|r| &**r) + }; + self.in_use.set(false); + ret + } + + pub fn into_set(self) -> IndexSet<T, S> { + self.set.into_inner() + } + + /// Get mutable access to the underlying [`IndexSet`]. + /// + /// This is safe, as it requires a `&mut self`, ensuring nothing is using + /// the 'frozen' contents. + pub fn as_mut(&mut self) -> &mut IndexSet<T, S> { + unsafe { &mut *self.set.get() } + } + + // TODO add more +} + +impl<T, S> From<IndexSet<T, S>> for FrozenIndexSet<T, S> { + fn from(set: IndexSet<T, S>) -> Self { + Self { + set: UnsafeCell::new(set), + in_use: Cell::new(false), + } + } +} + +impl<T: Eq + Hash + StableDeref, S> Index<usize> for FrozenIndexSet<T, S> { + type Output = T::Target; + fn index(&self, idx: usize) -> &T::Target { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let set = self.set.get(); + &*(*set)[idx] + }; + self.in_use.set(false); + ret + } +} + +impl<T: Eq + Hash, S: Default + BuildHasher> FromIterator<T> for FrozenIndexSet<T, S> { + fn from_iter<U>(iter: U) -> Self + where + U: IntoIterator<Item = T>, + { + let set: IndexSet<_, _> = iter.into_iter().collect(); + set.into() + } +} + +impl<T: Eq + Hash, S: Default> Default for FrozenIndexSet<T, S> { + fn default() -> Self { + Self::from(IndexSet::default()) + } +} diff --git a/vendor/elsa/src/lib.rs b/vendor/elsa/src/lib.rs new file mode 100644 index 000000000..848dceb34 --- /dev/null +++ b/vendor/elsa/src/lib.rs @@ -0,0 +1,29 @@ +//! _🎵 Immutability never bothered me anyway 🎶_ +//! +//! This crate provides various "Frozen" collections. +//! +//! 
These are append-only collections where references to entries can be held +//! on to even across insertions. This is safe because these collections only +//! support storing data that's present behind some indirection -- i.e. `String`, +//! `Vec<T>`, `Box<T>`, etc, and they only yield references to the data behind the +//! allocation (`&str`, `&[T]`, and `&T` respectively) +//! +//! The typical use case is having a global cache of strings or other data which the rest of the program borrows from. + +pub mod map; +pub mod vec; + +#[cfg(feature = "indexmap")] +pub mod index_map; +#[cfg(feature = "indexmap")] +pub mod index_set; + +pub mod sync; + +pub use map::{FrozenBTreeMap, FrozenMap}; +pub use vec::FrozenVec; + +#[cfg(feature = "indexmap")] +pub use index_map::FrozenIndexMap; +#[cfg(feature = "indexmap")] +pub use index_set::FrozenIndexSet; diff --git a/vendor/elsa/src/map.rs b/vendor/elsa/src/map.rs new file mode 100644 index 000000000..2faa19ce2 --- /dev/null +++ b/vendor/elsa/src/map.rs @@ -0,0 +1,451 @@ +use std::borrow::Borrow; +use std::cell::{Cell, UnsafeCell}; +use std::collections::hash_map::RandomState; +use std::collections::BTreeMap; +use std::collections::HashMap; +use std::hash::{BuildHasher, Hash}; +use std::iter::FromIterator; +use std::ops::Index; + +use stable_deref_trait::StableDeref; + +/// Append-only version of `std::collections::HashMap` where +/// insertion does not require mutable access +pub struct FrozenMap<K, V, S = RandomState> { + map: UnsafeCell<HashMap<K, V, S>>, + /// Eq/Hash implementations can have side-effects, and using Rc it is possible + /// for FrozenMap::insert to be called on a key that itself contains the same + /// `FrozenMap`, whose `eq` implementation also calls FrozenMap::insert + /// + /// We use this `in_use` flag to guard against any reentrancy. 
+ in_use: Cell<bool>, +} + +// safety: UnsafeCell implies !Sync + +impl<K: Eq + Hash, V> FrozenMap<K, V> { + pub fn new() -> Self { + Self { + map: UnsafeCell::new(Default::default()), + in_use: Cell::new(false), + } + } + + /// # Examples + /// + /// ``` + /// use elsa::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.len(), 0); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.len(), 1); + /// ``` + pub fn len(&self) -> usize { + assert!(!self.in_use.get()); + self.in_use.set(true); + let len = unsafe { + let map = self.map.get(); + (*map).len() + }; + self.in_use.set(false); + len + } + + /// # Examples + /// + /// ``` + /// use elsa::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.is_empty(), true); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.is_empty(), false); + /// ``` + pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl<K: Eq + Hash, V: StableDeref, S: BuildHasher> FrozenMap<K, V, S> { + // these should never return &K or &V + // these should never delete any entries + pub fn insert(&self, k: K, v: V) -> &V::Target { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + &*(*map).entry(k).or_insert(v) + }; + self.in_use.set(false); + ret + } + + /// Returns a reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. 
+ /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// ``` + pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> + where + K: Borrow<Q>, + Q: Hash + Eq, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get(k).map(|x| &**x) + }; + self.in_use.set(false); + ret + } + + /// Applies a function to the owner of the value corresponding to the key (if any). + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a"))); + /// assert_eq!(map.map_get(&2, Clone::clone), None); + /// ``` + pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T> + where + K: Borrow<Q>, + Q: Hash + Eq, + F: FnOnce(&V) -> T, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get(k).map(f) + }; + self.in_use.set(false); + ret + } + + pub fn into_map(self) -> HashMap<K, V, S> { + self.map.into_inner() + } + + // TODO add more +} + +impl<K: Eq + Hash + StableDeref, V: StableDeref, S: BuildHasher> FrozenMap<K, V, S> { + /// Returns a reference to the key and value matching a borrowed + /// key. + /// + /// The key argument may be any borrowed form of the map's key type, + /// but [`Hash`] and [`Eq`] on the borrowed form *must* match those + /// for the key type. 
+ /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// map.insert(Box::new("1"), Box::new("a")); + /// assert_eq!(map.get_key_value(&"1"), Some((&"1", &"a"))); + /// assert_eq!(map.get_key_value(&"2"), None); + /// ``` + pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K::Target, &V::Target)> + where + K: Borrow<Q>, + Q: Hash + Eq, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get_key_value(k).map(|(k, v)| (&**k, &**v)) + }; + self.in_use.set(false); + ret + } +} + +impl<K, V, S> std::convert::AsMut<HashMap<K, V, S>> for FrozenMap<K, V, S> { + /// Get mutable access to the underlying [`HashMap`]. + /// + /// This is safe, as it requires a `&mut self`, ensuring nothing is using + /// the 'frozen' contents. + fn as_mut(&mut self) -> &mut HashMap<K, V, S> { + unsafe { &mut *self.map.get() } + } +} + +impl<K, V, S> From<HashMap<K, V, S>> for FrozenMap<K, V, S> { + fn from(map: HashMap<K, V, S>) -> Self { + Self { + map: UnsafeCell::new(map), + in_use: Cell::new(false), + } + } +} + +impl<Q: ?Sized, K, V, S> Index<&Q> for FrozenMap<K, V, S> +where + Q: Eq + Hash, + K: Eq + Hash + Borrow<Q>, + V: StableDeref, + S: BuildHasher, +{ + type Output = V::Target; + + /// # Examples + /// + /// ``` + /// use elsa::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map[&1], "a"); + /// ``` + fn index(&self, idx: &Q) -> &V::Target { + self.get(idx) + .expect("attempted to index FrozenMap with unknown key") + } +} + +impl<K: Eq + Hash, V, S: BuildHasher + Default> FromIterator<(K, V)> for FrozenMap<K, V, S> { + fn from_iter<T>(iter: T) -> Self + where + T: IntoIterator<Item = (K, V)>, + { + let map: HashMap<_, _, _> = iter.into_iter().collect(); + map.into() + } +} + +impl<K: Eq + Hash, V, S: Default> Default for FrozenMap<K, V, S> { + fn default() -> Self { + Self { + map: 
UnsafeCell::new(Default::default()), + in_use: Cell::new(false), + } + } +} + +/// Append-only version of `std::collections::BTreeMap` where +/// insertion does not require mutable access +pub struct FrozenBTreeMap<K, V> { + map: UnsafeCell<BTreeMap<K, V>>, + /// Eq/Hash implementations can have side-effects, and using Rc it is possible + /// for FrozenBTreeMap::insert to be called on a key that itself contains the same + /// `FrozenBTreeMap`, whose `eq` implementation also calls FrozenBTreeMap::insert + /// + /// We use this `in_use` flag to guard against any reentrancy. + in_use: Cell<bool>, +} + +// safety: UnsafeCell implies !Sync + +impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> { + pub fn new() -> Self { + Self { + map: UnsafeCell::new(Default::default()), + in_use: Cell::new(false), + } + } + + /// # Examples + /// + /// ``` + /// use elsa::FrozenBTreeMap; + /// + /// let map = FrozenBTreeMap::new(); + /// assert_eq!(map.len(), 0); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.len(), 1); + /// ``` + pub fn len(&self) -> usize { + assert!(!self.in_use.get()); + self.in_use.set(true); + let len = unsafe { + let map = self.map.get(); + (*map).len() + }; + self.in_use.set(false); + len + } + + /// # Examples + /// + /// ``` + /// use elsa::FrozenBTreeMap; + /// + /// let map = FrozenBTreeMap::new(); + /// assert_eq!(map.is_empty(), true); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.is_empty(), false); + /// ``` + pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> { + // these should never return &K or &V + // these should never delete any entries + pub fn insert(&self, k: K, v: V) -> &V::Target { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + &*(*map).entry(k).or_insert(v) + }; + self.in_use.set(false); + ret + } + + /// Returns a reference to the value corresponding to the key. 
+ /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenBTreeMap; + /// + /// let map = FrozenBTreeMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// ``` + pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> + where + K: Borrow<Q>, + Q: Ord, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get(k).map(|x| &**x) + }; + self.in_use.set(false); + ret + } + + /// Applies a function to the owner of the value corresponding to the key (if any). + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::FrozenBTreeMap; + /// + /// let map = FrozenBTreeMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a"))); + /// assert_eq!(map.map_get(&2, Clone::clone), None); + /// ``` + pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T> + where + K: Borrow<Q>, + Q: Ord, + F: FnOnce(&V) -> T, + { + assert!(!self.in_use.get()); + self.in_use.set(true); + let ret = unsafe { + let map = self.map.get(); + (*map).get(k).map(f) + }; + self.in_use.set(false); + ret + } + + pub fn into_map(self) -> BTreeMap<K, V> { + self.map.into_inner() + } + + // TODO add more +} + +impl<K, V> std::convert::AsMut<BTreeMap<K, V>> for FrozenBTreeMap<K, V> { + /// Get mutable access to the underlying [`HashMap`]. + /// + /// This is safe, as it requires a `&mut self`, ensuring nothing is using + /// the 'frozen' contents. 
+ fn as_mut(&mut self) -> &mut BTreeMap<K, V> { + unsafe { &mut *self.map.get() } + } +} + +impl<K: Clone + Ord, V: StableDeref> From<BTreeMap<K, V>> for FrozenBTreeMap<K, V> { + fn from(map: BTreeMap<K, V>) -> Self { + Self { + map: UnsafeCell::new(map), + in_use: Cell::new(false), + } + } +} + +impl<Q: ?Sized, K, V> Index<&Q> for FrozenBTreeMap<K, V> +where + Q: Ord, + K: Clone + Ord + Borrow<Q>, + V: StableDeref, +{ + type Output = V::Target; + + /// # Examples + /// + /// ``` + /// use elsa::FrozenBTreeMap; + /// + /// let map = FrozenBTreeMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map[&1], "a"); + /// ``` + fn index(&self, idx: &Q) -> &V::Target { + self.get(idx) + .expect("attempted to index FrozenBTreeMap with unknown key") + } +} + +impl<K: Clone + Ord, V: StableDeref> FromIterator<(K, V)> for FrozenBTreeMap<K, V> { + fn from_iter<T>(iter: T) -> Self + where + T: IntoIterator<Item = (K, V)>, + { + let map: BTreeMap<_, _> = iter.into_iter().collect(); + map.into() + } +} + +impl<K: Clone + Ord, V: StableDeref> Default for FrozenBTreeMap<K, V> { + fn default() -> Self { + Self { + map: UnsafeCell::new(Default::default()), + in_use: Cell::new(false), + } + } +} diff --git a/vendor/elsa/src/sync.rs b/vendor/elsa/src/sync.rs new file mode 100644 index 000000000..afa4bb7c7 --- /dev/null +++ b/vendor/elsa/src/sync.rs @@ -0,0 +1,624 @@ +//! **This module is experimental** +//! +//! This module provides threadsafe versions of FrozenMap and FrozenVec, +//! ideal for use as a cache. +//! +//! These lock internally, however locks only last as long as the method calls +//! 
+ +use stable_deref_trait::StableDeref; +use std::alloc::Layout; +use std::borrow::Borrow; +use std::collections::BTreeMap; +use std::collections::HashMap; +use std::hash::Hash; +use std::iter::{FromIterator, IntoIterator}; +use std::mem::MaybeUninit; +use std::ops::Index; + +use std::sync::atomic::AtomicPtr; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering; +use std::sync::RwLock; + +/// Append-only threadsafe version of `std::collections::HashMap` where +/// insertion does not require mutable access +pub struct FrozenMap<K, V> { + map: RwLock<HashMap<K, V>>, +} + +impl<K, V> Default for FrozenMap<K, V> { + fn default() -> Self { + Self { + map: Default::default(), + } + } +} + +impl<K: Eq + Hash, V: StableDeref> FrozenMap<K, V> { + // these should never return &K or &V + // these should never delete any entries + + pub fn new() -> Self { + Self::default() + } + + /// If the key exists in the map, returns a reference + /// to the corresponding value, otherwise inserts a + /// new entry in the map for that key and returns a + /// reference to the given value. + /// + /// Existing values are never overwritten. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.insert(1, Box::new("a")), &"a"); + /// assert_eq!(map.insert(1, Box::new("b")), &"a"); + /// ``` + pub fn insert(&self, k: K, v: V) -> &V::Target { + let mut map = self.map.write().unwrap(); + let ret = unsafe { + let inserted = &**map.entry(k).or_insert(v); + &*(inserted as *const _) + }; + ret + } + + /// If the key exists in the map, returns a reference to the corresponding + /// value, otherwise inserts a new entry in the map for that key and the + /// value returned by the creation function, and returns a reference to the + /// generated value. 
+ /// + /// Existing values are never overwritten. + /// + /// The key may be any borrowed form of the map's key type, but [`Hash`] and + /// [`Eq`] on the borrowed form *must* match those for the key type. + /// + /// **Note** that the write lock is held for the duration of this function’s + /// execution, even while the value creation function is executing (if + /// needed). This will block any concurrent `get` or `insert` calls. + /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.insert_with(1, || Box::new("a")), &"a"); + /// assert_eq!(map.insert_with(1, || unreachable!()), &"a"); + /// ``` + pub fn insert_with(&self, k: K, f: impl FnOnce() -> V) -> &V::Target { + let mut map = self.map.write().unwrap(); + let ret = unsafe { + let inserted = &**map.entry(k).or_insert_with(f); + &*(inserted as *const _) + }; + ret + } + + /// If the key exists in the map, returns a reference to the corresponding + /// value, otherwise inserts a new entry in the map for that key and the + /// value returned by the creation function, and returns a reference to the + /// generated value. + /// + /// Existing values are never overwritten. + /// + /// The key may be any borrowed form of the map's key type, but [`Hash`] and + /// [`Eq`] on the borrowed form *must* match those for the key type. + /// + /// **Note** that the write lock is held for the duration of this function’s + /// execution, even while the value creation function is executing (if + /// needed). This will block any concurrent `get` or `insert` calls. 
+ /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.insert_with_key(1, |_| Box::new("a")), &"a"); + /// assert_eq!(map.insert_with_key(1, |_| unreachable!()), &"a"); + /// ``` + pub fn insert_with_key(&self, k: K, f: impl FnOnce(&K) -> V) -> &V::Target { + let mut map = self.map.write().unwrap(); + let ret = unsafe { + let inserted = &**map.entry(k).or_insert_with_key(f); + &*(inserted as *const _) + }; + ret + } + + /// Returns a reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// ``` + pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> + where + K: Borrow<Q>, + Q: Hash + Eq, + { + let map = self.map.read().unwrap(); + let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) }; + ret + } + + /// Applies a function to the owner of the value corresponding to the key (if any). + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for + /// the key type. 
+ /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a"))); + /// assert_eq!(map.map_get(&2, Clone::clone), None); + /// ``` + pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T> + where + K: Borrow<Q>, + Q: Hash + Eq, + F: FnOnce(&V) -> T, + { + let map = self.map.read().unwrap(); + let ret = map.get(k).map(f); + ret + } + + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.len(), 0); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.len(), 1); + /// ``` + pub fn len(&self) -> usize { + let map = self.map.read().unwrap(); + map.len() + } + + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenMap; + /// + /// let map = FrozenMap::new(); + /// assert_eq!(map.is_empty(), true); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.is_empty(), false); + /// ``` + pub fn is_empty(&self) -> bool { + let map = self.map.read().unwrap(); + map.is_empty() + } + + // TODO add more +} + +/// Append-only threadsafe version of `std::vec::Vec` where +/// insertion does not require mutable access +pub struct FrozenVec<T> { + vec: RwLock<Vec<T>>, +} + +impl<T> Default for FrozenVec<T> { + fn default() -> Self { + Self { + vec: Default::default(), + } + } +} + +impl<T: StableDeref> FrozenVec<T> { + pub fn new() -> Self { + Default::default() + } + + // these should never return &T + // these should never delete any entries + + pub fn push(&self, val: T) { + let mut vec = self.vec.write().unwrap(); + vec.push(val); + } + + /// Push, immediately getting a reference to the element + pub fn push_get(&self, val: T) -> &T::Target { + let mut vec = self.vec.write().unwrap(); + vec.push(val); + unsafe { &*(&**vec.get_unchecked(vec.len() - 1) as *const T::Target) } + } + + /// Push, immediately getting a an index of the element + 
/// + /// Index can then be used with the `get` method + /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenVec; + /// + /// let map = FrozenVec::new(); + /// let idx = map.push_get_index(String::from("a")); + /// assert_eq!(map.get(idx), Some("a")); + /// assert_eq!(idx, 0); + /// assert_eq!(map.push_get_index(String::from("b")), 1); + /// ``` + pub fn push_get_index(&self, val: T) -> usize { + let mut vec = self.vec.write().unwrap(); + let index = vec.len(); + vec.push(val); + return index; + } + + pub fn get(&self, index: usize) -> Option<&T::Target> { + let vec = self.vec.read().unwrap(); + unsafe { vec.get(index).map(|x| &*(&**x as *const T::Target)) } + } + + // TODO add more +} + +/// Append-only threadsafe version of `std::vec::Vec` where +/// insertion does not require mutable access. +/// Does not have locks, only allows `Copy` types and will +/// spinlock on contention. The spinlocks are really rare as +/// they only happen on reallocation due to a push going over +/// the capacity. +pub struct LockFreeFrozenVec<T: Copy> { + data: AtomicPtr<T>, + len: AtomicUsize, + cap: AtomicUsize, +} + +impl<T: Copy> Drop for LockFreeFrozenVec<T> { + fn drop(&mut self) { + let cap = *self.cap.get_mut(); + let layout = self.layout(cap); + unsafe { + std::alloc::dealloc((*self.data.get_mut()).cast(), layout); + } + } +} + +impl<T: Copy> Default for LockFreeFrozenVec<T> { + fn default() -> Self { + Self { + // FIXME: use `std::ptr::invalid_mut()` once that is stable. 
+ data: AtomicPtr::new(std::mem::align_of::<T>() as *mut T), + len: AtomicUsize::new(0), + cap: AtomicUsize::new(0), + } + } +} + +impl<T: Copy> LockFreeFrozenVec<T> { + pub fn new() -> Self { + Default::default() + } + + pub fn with_capacity(cap: usize) -> Self { + Self { + data: AtomicPtr::new( + Box::into_raw(vec![MaybeUninit::<T>::uninit(); cap].into_boxed_slice()).cast(), + ), + len: AtomicUsize::new(0), + cap: AtomicUsize::new(cap), + } + } + + fn lock<U>(&self, f: impl FnOnce(&mut *mut T) -> U) -> U { + let mut ptr; + loop { + ptr = self.data.swap(std::ptr::null_mut(), Ordering::Acquire); + if !ptr.is_null() { + // Wheeeee spinlock + break; + } + } + + let ret = f(&mut ptr); + self.data.store(ptr, Ordering::Release); + ret + } + + fn layout(&self, cap: usize) -> Layout { + let num_bytes = std::mem::size_of::<T>() * cap; + let align = std::mem::align_of::<T>(); + Layout::from_size_align(num_bytes, align).unwrap() + } + + // these should never return &T + // these should never delete any entries + + const NOT_ZST: () = if std::mem::size_of::<T>() == 0 { + panic!("`LockFreeFrozenVec` cannot be used with ZSTs"); + }; + + pub fn push(&self, val: T) -> usize { + // This statement actually does something: it evaluates a constant. + #[allow(path_statements)] + { + Self::NOT_ZST + } + self.lock(|ptr| { + // These values must be consistent with the pointer we got. + let len = self.len.load(Ordering::Acquire); + let cap = self.cap.load(Ordering::Acquire); + if len >= cap { + if cap == 0 { + // No memory allocated yet + let layout = self.layout(128); + // SAFETY: `LockFreeFrozenVec` statically rejects zsts + unsafe { + *ptr = std::alloc::alloc(layout).cast::<T>(); + } + // This is written before the end of the `lock` closure, so no one will observe this + // until the data pointer has been updated anyway. 
+ self.cap.store(128, Ordering::Release); + } else { + // Out of memory, realloc with double the capacity + let layout = self.layout(cap); + let new_size = layout.size() * 2; + // SAFETY: `LockFreeFrozenVec` statically rejects zsts and the input `ptr` has always been + // allocated at the size stated in `cap`. + unsafe { + *ptr = std::alloc::realloc((*ptr).cast(), layout, new_size).cast::<T>(); + } + // This is written before the end of the `lock` closure, so no one will observe this + // until the data pointer has been updated anyway. + self.cap.store(cap * 2, Ordering::Release); + } + assert!(!ptr.is_null()); + } + unsafe { + ptr.add(len).write(val); + } + // This is written before updating the data pointer. Other `push` calls cannot observe this, + // because they are blocked on aquiring the data pointer before they ever read the `len`. + // `get` may read the length before actually aquiring the data pointer lock, but that is fine, + // as once it is able to aquire the lock, there will be actually the right number of elements + // stored. + self.len.store(len + 1, Ordering::Release); + len + }) + } + + pub fn get(&self, index: usize) -> Option<T> { + // The length can only grow, so just doing the length check + // independently of the `lock` and read is fine. Worst case we + // read an old length value and end up returning `None` even if + // another thread already inserted the value. 
+ let len = self.len.load(Ordering::Relaxed); + if index >= len { + return None; + } + self.lock(|ptr| Some(unsafe { ptr.add(index).read() })) + } +} + +#[test] +fn test_non_lockfree() { + #[derive(Copy, Clone, Debug, PartialEq, Eq)] + struct Moo(i32); + + for vec in [ + LockFreeFrozenVec::new(), + LockFreeFrozenVec::with_capacity(1), + LockFreeFrozenVec::with_capacity(2), + LockFreeFrozenVec::with_capacity(1000), + ] { + assert_eq!(vec.get(1), None); + + vec.push(Moo(1)); + let i = vec.push(Moo(2)); + vec.push(Moo(3)); + + assert_eq!(vec.get(i), Some(Moo(2))); + + std::thread::scope(|s| { + s.spawn(|| { + for i in 0..1000 { + vec.push(Moo(i)); + } + }); + s.spawn(|| { + for i in 0..1000 { + vec.push(Moo(i)); + } + }); + for i in 0..2000 { + while vec.get(i).is_none() {} + } + }); + } +} + +/// Append-only threadsafe version of `std::collections::BTreeMap` where +/// insertion does not require mutable access +#[derive(Debug)] +pub struct FrozenBTreeMap<K, V>(RwLock<BTreeMap<K, V>>); + +impl<K: Clone + Ord, V: StableDeref> FrozenBTreeMap<K, V> { + pub fn new() -> Self { + Self(RwLock::new(BTreeMap::new())) + } + + // these should never return &K or &V + // these should never delete any entries + + /// Returns a reference to the value corresponding to the key. + /// + /// The key may be any borrowed form of the map's key type, but + /// [`Ord`] on the borrowed form *must* match those for + /// the key type. + /// + /// # Examples + /// + /// ``` + /// use elsa::sync::FrozenBTreeMap; + /// + /// let map = FrozenBTreeMap::new(); + /// map.insert(1, Box::new("a")); + /// assert_eq!(map.get(&1), Some(&"a")); + /// assert_eq!(map.get(&2), None); + /// ``` + pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V::Target> + where + K: Borrow<Q>, + Q: Ord, + { + let map = self.0.read().unwrap(); + let ret = unsafe { map.get(k).map(|x| &*(&**x as *const V::Target)) }; + ret + } + + /// Insert a new value into the map. Does nothing if the key is already occupied. 
    ///
    /// # Examples
    ///
    /// ```
    /// use elsa::sync::FrozenBTreeMap;
    ///
    /// let map = FrozenBTreeMap::new();
    /// map.insert(1, Box::new("a"));
    /// assert_eq!(map.get(&1), Some(&"a"));
    /// ```
    pub fn insert(&self, k: K, v: V) -> &V::Target {
        let mut map = self.0.write().unwrap();
        // Cast through a raw pointer so the returned borrow is not tied to
        // the write guard; sound because the deref target's address is stable
        // (`StableDeref`) and entries are never removed.
        let ret = unsafe {
            let inserted = &**map.entry(k).or_insert(v);
            &*(inserted as *const _)
        };
        ret
    }

    /// Applies a function to the owner of the value corresponding to the key (if any).
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Ord`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// # Examples
    ///
    /// ```
    /// use elsa::sync::FrozenBTreeMap;
    ///
    /// let map = FrozenBTreeMap::new();
    /// map.insert(1, Box::new("a"));
    /// assert_eq!(map.map_get(&1, Clone::clone), Some(Box::new("a")));
    /// assert_eq!(map.map_get(&2, Clone::clone), None);
    /// ```
    pub fn map_get<Q: ?Sized, T, F>(&self, k: &Q, f: F) -> Option<T>
    where
        K: Borrow<Q>,
        Q: Ord,
        F: FnOnce(&V) -> T,
    {
        let map = self.0.read().unwrap();
        // The closure runs under the read lock; only an owned `T` escapes.
        let ret = map.get(k).map(f);
        ret
    }

    /// Returns the number of entries in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use elsa::sync::FrozenBTreeMap;
    ///
    /// let map = FrozenBTreeMap::new();
    /// assert_eq!(map.len(), 0);
    /// map.insert(1, Box::new("a"));
    /// assert_eq!(map.len(), 1);
    /// ```
    pub fn len(&self) -> usize {
        let map = self.0.read().unwrap();
        map.len()
    }

    /// Returns `true` if the map contains no entries.
    ///
    /// # Examples
    ///
    /// ```
    /// use elsa::sync::FrozenBTreeMap;
    ///
    /// let map = FrozenBTreeMap::new();
    /// assert_eq!(map.is_empty(), true);
    /// map.insert(1, Box::new("a"));
    /// assert_eq!(map.is_empty(), false);
    /// ```
    pub fn is_empty(&self) -> bool {
        let map = self.0.read().unwrap();
        map.is_empty()
    }
}

impl<K: Clone + Ord, V: StableDeref> From<BTreeMap<K, V>> for FrozenBTreeMap<K, V> {
    fn from(map: BTreeMap<K, V>) -> Self {
        Self(RwLock::new(map))
    }
}

impl<Q: ?Sized, K, V> Index<&Q> for FrozenBTreeMap<K, V>
where
    Q: Ord,
    K: Clone + Ord + Borrow<Q>,
    V: StableDeref,
{
    type Output = V::Target;

    /// # Examples
    ///
    /// ```
    /// use elsa::sync::FrozenBTreeMap;
    ///
    /// let map = FrozenBTreeMap::new();
    /// map.insert(1, Box::new("a"));
    /// assert_eq!(map[&1], "a");
    /// ```
    fn index(&self, idx: &Q) -> &V::Target {
        self.get(idx)
            .expect("attempted to index FrozenBTreeMap with unknown key")
    }
}

impl<K: Clone + Ord, V: StableDeref> FromIterator<(K, V)> for FrozenBTreeMap<K, V> {
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = (K, V)>,
    {
        let map: BTreeMap<_, _> = iter.into_iter().collect();
        map.into()
    }
}

impl<K: Clone + Ord, V: StableDeref> Default for FrozenBTreeMap<K, V> {
    fn default() -> Self {
        Self::new()
    }
}
diff --git a/vendor/elsa/src/vec.rs b/vendor/elsa/src/vec.rs
new file mode 100644
index 000000000..33b6e6a50
--- /dev/null
+++ b/vendor/elsa/src/vec.rs
@@ -0,0 +1,347 @@
use std::cell::UnsafeCell;
use std::cmp::Ordering;
use std::iter::FromIterator;
use std::ops::Index;

use stable_deref_trait::StableDeref;

/// Append-only version of `std::vec::Vec` where
/// insertion does not require mutable access
pub struct FrozenVec<T> {
    vec: UnsafeCell<Vec<T>>,
    // XXXManishearth do we need a reentrancy guard here as well?
    // StableDeref may not guarantee that there are no side effects
}

// safety: UnsafeCell implies !Sync

impl<T> FrozenVec<T> {
    /// Constructs a new, empty vector.
    pub fn new() -> Self {
        Self {
            vec: UnsafeCell::new(Default::default()),
        }
    }
}

impl<T> FrozenVec<T> {
    // these should never return &T
    // these should never delete any entries

    /// Appends an element to the back of the vector.
    pub fn push(&self, val: T) {
        unsafe {
            let vec = self.vec.get();
            (*vec).push(val)
        }
    }
}

impl<T: StableDeref> FrozenVec<T> {
    /// Push, immediately getting a reference to the element
    pub fn push_get(&self, val: T) -> &T::Target {
        unsafe {
            let vec = self.vec.get();
            (*vec).push(val);
            // The just-pushed element is at `len - 1`, so `get_unchecked` is
            // in bounds. The raw-pointer cast lets the borrow outlive this
            // scope; sound because the deref target's address is stable
            // (`StableDeref`) and elements are never removed.
            &*(&**(*vec).get_unchecked((*vec).len() - 1) as *const T::Target)
        }
    }

    /// Returns a reference to an element.
    pub fn get(&self, index: usize) -> Option<&T::Target> {
        unsafe {
            let vec = self.vec.get();
            (*vec).get(index).map(|x| &**x)
        }
    }

    /// Returns a reference to an element, without doing bounds checking.
    ///
    /// ## Safety
    ///
    /// `index` must be in bounds, i.e. it must be less than `self.len()`
    pub unsafe fn get_unchecked(&self, index: usize) -> &T::Target {
        let vec = self.vec.get();
        &**(*vec).get_unchecked(index)
    }
}

impl<T: Copy> FrozenVec<T> {
    /// Returns a copy of an element.
    pub fn get_copy(&self, index: usize) -> Option<T> {
        unsafe {
            let vec = self.vec.get();
            (*vec).get(index).copied()
        }
    }
}

impl<T> FrozenVec<T> {
    /// Returns the number of elements in the vector.
    pub fn len(&self) -> usize {
        unsafe {
            let vec = self.vec.get();
            (*vec).len()
        }
    }

    /// Returns `true` if the vector contains no elements.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}

impl<T: StableDeref> FrozenVec<T> {
    /// Returns the first element of the vector, or `None` if empty.
    pub fn first(&self) -> Option<&T::Target> {
        unsafe {
            let vec = self.vec.get();
            (*vec).first().map(|x| &**x)
        }
    }

    /// Returns the last element of the vector, or `None` if empty.
    pub fn last(&self) -> Option<&T::Target> {
        unsafe {
            let vec = self.vec.get();
            (*vec).last().map(|x| &**x)
        }
    }
    /// Returns an iterator over the vector.
    pub fn iter(&self) -> Iter<T> {
        self.into_iter()
    }
}

impl<T: StableDeref> FrozenVec<T> {
    /// Converts the frozen vector into a plain vector.
    pub fn into_vec(self) -> Vec<T> {
        self.vec.into_inner()
    }
}

impl<T: StableDeref> FrozenVec<T> {
    // binary search functions: they need to be reimplemented here to be safe (instead of calling
    // their equivalents directly on the underlying Vec), as they run user callbacks that could
    // reentrantly call other functions on this vector

    /// Binary searches this sorted vector for a given element, analogous to [slice::binary_search].
    pub fn binary_search(&self, x: &T::Target) -> Result<usize, usize>
    where
        T::Target: Ord,
    {
        self.binary_search_by(|p| p.cmp(x))
    }

    /// Binary searches this sorted vector with a comparator function, analogous to
    /// [slice::binary_search_by].
    pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
    where
        F: FnMut(&'a T::Target) -> Ordering,
    {
        let mut size = self.len();
        let mut left = 0;
        let mut right = size;
        while left < right {
            let mid = left + size / 2;

            // safety: like the core algorithm, mid is always within original vector len; in
            // pathological cases, user could push to the vector in the meantime, but this can only
            // increase the length, keeping this safe
            let cmp = f(unsafe { self.get_unchecked(mid) });

            if cmp == Ordering::Less {
                left = mid + 1;
            } else if cmp == Ordering::Greater {
                right = mid;
            } else {
                return Ok(mid);
            }

            size = right - left;
        }
        Err(left)
    }

    /// Binary searches this sorted vector with a key extraction function, analogous to
    /// [slice::binary_search_by_key].
    pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize>
    where
        F: FnMut(&'a T::Target) -> B,
        B: Ord,
    {
        self.binary_search_by(|k| f(k).cmp(b))
    }

    /// Returns the index of the partition point according to the given predicate
    /// (the index of the first element of the second partition), analogous to
    /// [slice::partition_point].
    pub fn partition_point<P>(&self, mut pred: P) -> usize
    where
        P: FnMut(&T::Target) -> bool,
    {
        let mut left = 0;
        let mut right = self.len();

        while left != right {
            let mid = left + (right - left) / 2;
            // safety: like in binary_search_by
            let value = unsafe { self.get_unchecked(mid) };
            if pred(value) {
                left = mid + 1;
            } else {
                right = mid;
            }
        }

        left
    }

    // TODO add more
}

impl<T> std::convert::AsMut<Vec<T>> for FrozenVec<T> {
    /// Get mutable access to the underlying vector.
    ///
    /// This is safe, as it requires a `&mut self`, ensuring nothing is using
    /// the 'frozen' contents.
    fn as_mut(&mut self) -> &mut Vec<T> {
        unsafe { &mut *self.vec.get() }
    }
}

impl<T> Default for FrozenVec<T> {
    fn default() -> Self {
        FrozenVec::new()
    }
}

impl<T> From<Vec<T>> for FrozenVec<T> {
    fn from(vec: Vec<T>) -> Self {
        Self {
            vec: UnsafeCell::new(vec),
        }
    }
}

impl<T: StableDeref> Index<usize> for FrozenVec<T> {
    type Output = T::Target;
    /// Panics if `idx` is out of bounds; use `get` for a checked lookup.
    fn index(&self, idx: usize) -> &T::Target {
        self.get(idx).unwrap_or_else(|| {
            panic!(
                "index out of bounds: the len is {} but the index is {}",
                self.len(),
                idx
            )
        })
    }
}

impl<A> FromIterator<A> for FrozenVec<A> {
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = A>,
    {
        let vec: Vec<_> = iter.into_iter().collect();
        vec.into()
    }
}

/// Iterator over FrozenVec, obtained via `.iter()`
///
/// It is safe to push to the vector during iteration
pub struct Iter<'a, T> {
    // Borrow of the vector being iterated; elements pushed mid-iteration
    // become visible to subsequent `next` calls.
    vec: &'a FrozenVec<T>,
    // Index of the next element to yield.
    idx: usize,
}

impl<'a, T: StableDeref> Iterator for Iter<'a, T> {
    type Item = &'a T::Target;
    fn next(&mut self) -> Option<&'a T::Target> {
        if let Some(ret) = self.vec.get(self.idx) {
            self.idx += 1;
            Some(ret)
        } else {
            None
        }
    }
}

impl<'a, T: StableDeref> IntoIterator for &'a FrozenVec<T> {
    type Item = &'a T::Target;
    type IntoIter = Iter<'a, T>;
    fn into_iter(self) -> Iter<'a, T> {
        Iter { vec: self, idx: 0 }
    }
}

#[test]
fn test_iteration() {
    let vec = vec!["a", "b", "c", "d"];
    let frozen: FrozenVec<_> = vec.clone().into();

    assert_eq!(vec, frozen.iter().collect::<Vec<_>>());
    for (e1, e2) in vec.iter().zip(frozen.iter()) {
        assert_eq!(*e1, e2);
    }

    assert_eq!(vec.len(), frozen.iter().count())
}

#[test]
fn test_accessors() {
    let vec: FrozenVec<String> = FrozenVec::new();

    assert_eq!(vec.is_empty(), true);
    assert_eq!(vec.len(), 0);
    assert_eq!(vec.first(), None);
    assert_eq!(vec.last(), None);
    assert_eq!(vec.get(1), None);

    vec.push("a".to_string());
    vec.push("b".to_string());
    vec.push("c".to_string());

    assert_eq!(vec.is_empty(), false);
    assert_eq!(vec.len(), 3);
    assert_eq!(vec.first(), Some("a"));
    assert_eq!(vec.last(), Some("c"));
    assert_eq!(vec.get(1), Some("b"));
}

#[test]
fn test_non_stable_deref() {
    // `Moo` is `Copy` but not `StableDeref`, so only the `get_copy`-style
    // accessors are exercised here.
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct Moo(i32);
    let vec: FrozenVec<Moo> = FrozenVec::new();

    assert_eq!(vec.is_empty(), true);
    assert_eq!(vec.len(), 0);
    assert_eq!(vec.get_copy(1), None);

    vec.push(Moo(1));
    vec.push(Moo(2));
    vec.push(Moo(3));

    assert_eq!(vec.is_empty(), false);
    assert_eq!(vec.len(), 3);
    assert_eq!(vec.get_copy(1), Some(Moo(2)));
}

#[test]
fn test_binary_search() {
    let vec: FrozenVec<_> = vec!["ab", "cde", "fghij"].into();

    assert_eq!(vec.binary_search("cde"), Ok(1));
    assert_eq!(vec.binary_search("cdf"), Err(2));
    assert_eq!(vec.binary_search("a"), Err(0));
    assert_eq!(vec.binary_search("g"), Err(3));

    assert_eq!(vec.binary_search_by_key(&1, |x| x.len()), Err(0));
    assert_eq!(vec.binary_search_by_key(&3, |x| x.len()), Ok(1));
    assert_eq!(vec.binary_search_by_key(&4, |x| x.len()), Err(2));

    assert_eq!(vec.partition_point(|x| x.len() < 4), 2);
    assert_eq!(vec.partition_point(|_| false), 0);
    assert_eq!(vec.partition_point(|_| true), 3);
}