From 4f9fe856a25ab29345b90e7725509e9ee38a37be Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 17 Apr 2024 14:19:41 +0200
Subject: Adding upstream version 1.69.0+dfsg1.

Signed-off-by: Daniel Baumann
---
 vendor/elsa/src/index_set.rs | 180 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 180 insertions(+)
 create mode 100644 vendor/elsa/src/index_set.rs

diff --git a/vendor/elsa/src/index_set.rs b/vendor/elsa/src/index_set.rs
new file mode 100644
index 000000000..1222cde05
--- /dev/null
+++ b/vendor/elsa/src/index_set.rs
@@ -0,0 +1,180 @@
+use std::borrow::Borrow;
+use std::cell::{Cell, UnsafeCell};
+use std::collections::hash_map::RandomState;
+use std::hash::{BuildHasher, Hash};
+use std::iter::FromIterator;
+use std::ops::Index;
+
+use indexmap::IndexSet;
+use stable_deref_trait::StableDeref;
+
+/// Append-only version of `indexmap::IndexSet` where
+/// insertion does not require mutable access
+pub struct FrozenIndexSet<T, S = RandomState> {
+    set: UnsafeCell<IndexSet<T, S>>,
+    /// Eq/Hash implementations can have side-effects, and using Rc it is possible
+    /// for FrozenIndexSet::insert to be called on a key that itself contains the same
+    /// `FrozenIndexSet`, whose `eq` implementation also calls FrozenIndexSet::insert
+    ///
+    /// We use this `in_use` flag to guard against any reentrancy.
+    in_use: Cell<bool>,
+}
+
+// safety: UnsafeCell implies !Sync
+
+impl<T: Eq + Hash> FrozenIndexSet<T> {
+    pub fn new() -> Self {
+        Self::from(IndexSet::new())
+    }
+}
+
+impl<T: Eq + Hash + StableDeref, S: BuildHasher> FrozenIndexSet<T, S> {
+    // these should never return &T
+    // these should never delete any entries
+    pub fn insert(&self, value: T) -> &T::Target {
+        assert!(!self.in_use.get());
+        self.in_use.set(true);
+        let ret = unsafe {
+            let set = self.set.get();
+            let (index, _was_vacant) = (*set).insert_full(value);
+            &*(*set)[index]
+        };
+        self.in_use.set(false);
+        ret
+    }
+
+    // these should never return &T
+    // these should never delete any entries
+    pub fn insert_full(&self, value: T) -> (usize, &T::Target) {
+        assert!(!self.in_use.get());
+        self.in_use.set(true);
+        let ret = unsafe {
+            let set = self.set.get();
+            let (index, _was_vacant) = (*set).insert_full(value);
+            (index, &*(*set)[index])
+        };
+        self.in_use.set(false);
+        ret
+    }
+
+    // TODO implement in case the standard Entry API gets improved
+    // // TODO avoid double lookup
+    // pub fn entry<Q: ?Sized>(&self, value: &Q) -> Entry<T, Q>
+    //     where Q: Hash + Equivalent<T> + ToOwned<Owned = T>
+    // {
+    //     assert!(!self.in_use.get());
+    //     self.in_use.set(true);
+    //     unsafe {
+    //         let set = self.set.get();
+    //         match (*set).get_full(value) {
+    //             Some((index, reference)) => {
+    //                 Entry::Occupied(OccupiedEntry {
+    //                     index,
+    //                     reference,
+    //                     set: &*set,
+    //                 })
+    //             }
+    //             None => {
+    //                 Entry::Vacant(VacantEntry {
+    //                     value: Cow::Borrowed(value),
+    //                     set: &*set,
+    //                 })
+    //             }
+    //         }
+    //     }
+    // }
+
+    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&T::Target>
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        assert!(!self.in_use.get());
+        self.in_use.set(true);
+        let ret = unsafe {
+            let set = self.set.get();
+            (*set).get(k).map(|x| &**x)
+        };
+        self.in_use.set(false);
+        ret
+    }
+
+    pub fn get_full<Q: ?Sized>(&self, k: &Q) -> Option<(usize, &T::Target)>
+    where
+        T: Borrow<Q>,
+        Q: Hash + Eq,
+    {
+        assert!(!self.in_use.get());
+        self.in_use.set(true);
+        let ret = unsafe {
+            let set = self.set.get();
+            (*set).get_full(k).map(|(i, x)| (i, &**x))
+        };
+        self.in_use.set(false);
+        ret
+    }
+
+    pub fn get_index(&self, index: usize) -> Option<&T::Target> {
+        assert!(!self.in_use.get());
+        self.in_use.set(true);
+        let ret = unsafe {
+            let set = self.set.get();
+            (*set).get_index(index).map(|r| &**r)
+        };
+        self.in_use.set(false);
+        ret
+    }
+
+    pub fn into_set(self) -> IndexSet<T, S> {
+        self.set.into_inner()
+    }
+
+    /// Get mutable access to the underlying [`IndexSet`].
+    ///
+    /// This is safe, as it requires a `&mut self`, ensuring nothing is using
+    /// the 'frozen' contents.
+    pub fn as_mut(&mut self) -> &mut IndexSet<T, S> {
+        unsafe { &mut *self.set.get() }
+    }
+
+    // TODO add more
+}
+
+impl<T, S> From<IndexSet<T, S>> for FrozenIndexSet<T, S> {
+    fn from(set: IndexSet<T, S>) -> Self {
+        Self {
+            set: UnsafeCell::new(set),
+            in_use: Cell::new(false),
+        }
+    }
+}
+
+impl<T: Eq + Hash + StableDeref, S: BuildHasher> Index<usize> for FrozenIndexSet<T, S> {
+    type Output = T::Target;
+    fn index(&self, idx: usize) -> &T::Target {
+        assert!(!self.in_use.get());
+        self.in_use.set(true);
+        let ret = unsafe {
+            let set = self.set.get();
+            &*(*set)[idx]
+        };
+        self.in_use.set(false);
+        ret
+    }
+}
+
+impl<T: Eq + Hash, S: BuildHasher + Default> FromIterator<T> for FrozenIndexSet<T, S> {
+    fn from_iter<U>(iter: U) -> Self
+    where
+        U: IntoIterator<Item = T>,
+    {
+        let set: IndexSet<_, _> = iter.into_iter().collect();
+        set.into()
+    }
+}
+
+impl<T: Eq + Hash, S: BuildHasher + Default> Default for FrozenIndexSet<T, S> {
+    fn default() -> Self {
+        Self::from(IndexSet::default())
+    }
+}
-- 
cgit v1.2.3
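
Usage note (not part of the patch): the sketch below shows how the vendored FrozenIndexSet is meant to be used. Because insert() takes &self and returns a reference to the stable target of the stored value, references handed out earlier stay valid while further values are inserted through the same shared handle. It assumes elsa is built with its indexmap support and that the type is re-exported as elsa::FrozenIndexSet; the variable names are illustrative only.

// Illustrative sketch, not part of the vendored file.
// Assumes the re-export path elsa::FrozenIndexSet (elsa built with indexmap support).
use elsa::FrozenIndexSet;

fn main() {
    // String derefs to str with a stable address, so insert() can hand out
    // &str references that stay valid while the set keeps growing behind &self.
    let interner: FrozenIndexSet<String> = FrozenIndexSet::new();

    let (idx_a, a) = interner.insert_full(String::from("alpha"));
    let b: &str = interner.insert(String::from("beta"));

    // References obtained before later insertions remain usable.
    assert_eq!(a, "alpha");
    assert_eq!(b, "beta");

    // Lookups by value or by insertion index also take &self.
    assert_eq!(interner.get("alpha"), Some("alpha"));
    assert_eq!(interner.get_index(idx_a), Some("alpha"));
    assert_eq!(&interner[idx_a], "alpha");
}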