diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 19:33:14 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 19:33:14 +0000 |
commit | 36d22d82aa202bb199967e9512281e9a53db42c9 (patch) | |
tree | 105e8c98ddea1c1e4784a60a5a6410fa416be2de /third_party/rust/itertools/src/unique_impl.rs | |
parent | Initial commit. (diff) | |
download | firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip |
Adding upstream version 115.7.0esr.upstream/115.7.0esr
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/itertools/src/unique_impl.rs')
-rw-r--r-- | third_party/rust/itertools/src/unique_impl.rs | 179 |
1 file changed, 179 insertions, 0 deletions
diff --git a/third_party/rust/itertools/src/unique_impl.rs b/third_party/rust/itertools/src/unique_impl.rs new file mode 100644 index 0000000000..4e81e78ec0 --- /dev/null +++ b/third_party/rust/itertools/src/unique_impl.rs @@ -0,0 +1,179 @@ +use std::collections::HashMap; +use std::collections::hash_map::Entry; +use std::hash::Hash; +use std::fmt; +use std::iter::FusedIterator; + +/// An iterator adapter to filter out duplicate elements. +/// +/// See [`.unique_by()`](crate::Itertools::unique) for more information. +#[derive(Clone)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +pub struct UniqueBy<I: Iterator, V, F> { + iter: I, + // Use a Hashmap for the Entry API in order to prevent hashing twice. + // This can maybe be replaced with a HashSet once `get_or_insert_with` + // or a proper Entry API for Hashset is stable and meets this msrv + used: HashMap<V, ()>, + f: F, +} + +impl<I, V, F> fmt::Debug for UniqueBy<I, V, F> + where I: Iterator + fmt::Debug, + V: fmt::Debug + Hash + Eq, +{ + debug_fmt_fields!(UniqueBy, iter, used); +} + +/// Create a new `UniqueBy` iterator. 
+pub fn unique_by<I, V, F>(iter: I, f: F) -> UniqueBy<I, V, F> + where V: Eq + Hash, + F: FnMut(&I::Item) -> V, + I: Iterator, +{ + UniqueBy { + iter, + used: HashMap::new(), + f, + } +} + +// count the number of new unique keys in iterable (`used` is the set already seen) +fn count_new_keys<I, K>(mut used: HashMap<K, ()>, iterable: I) -> usize + where I: IntoIterator<Item=K>, + K: Hash + Eq, +{ + let iter = iterable.into_iter(); + let current_used = used.len(); + used.extend(iter.map(|key| (key, ()))); + used.len() - current_used +} + +impl<I, V, F> Iterator for UniqueBy<I, V, F> + where I: Iterator, + V: Eq + Hash, + F: FnMut(&I::Item) -> V +{ + type Item = I::Item; + + fn next(&mut self) -> Option<Self::Item> { + while let Some(v) = self.iter.next() { + let key = (self.f)(&v); + if self.used.insert(key, ()).is_none() { + return Some(v); + } + } + None + } + + #[inline] + fn size_hint(&self) -> (usize, Option<usize>) { + let (low, hi) = self.iter.size_hint(); + ((low > 0 && self.used.is_empty()) as usize, hi) + } + + fn count(self) -> usize { + let mut key_f = self.f; + count_new_keys(self.used, self.iter.map(move |elt| key_f(&elt))) + } +} + +impl<I, V, F> DoubleEndedIterator for UniqueBy<I, V, F> + where I: DoubleEndedIterator, + V: Eq + Hash, + F: FnMut(&I::Item) -> V +{ + fn next_back(&mut self) -> Option<Self::Item> { + while let Some(v) = self.iter.next_back() { + let key = (self.f)(&v); + if self.used.insert(key, ()).is_none() { + return Some(v); + } + } + None + } +} + +impl<I, V, F> FusedIterator for UniqueBy<I, V, F> + where I: FusedIterator, + V: Eq + Hash, + F: FnMut(&I::Item) -> V +{} + +impl<I> Iterator for Unique<I> + where I: Iterator, + I::Item: Eq + Hash + Clone +{ + type Item = I::Item; + + fn next(&mut self) -> Option<Self::Item> { + while let Some(v) = self.iter.iter.next() { + if let Entry::Vacant(entry) = self.iter.used.entry(v) { + let elt = entry.key().clone(); + entry.insert(()); + return Some(elt); + } + } + None + } + + #[inline] + fn 
size_hint(&self) -> (usize, Option<usize>) { + let (low, hi) = self.iter.iter.size_hint(); + ((low > 0 && self.iter.used.is_empty()) as usize, hi) + } + + fn count(self) -> usize { + count_new_keys(self.iter.used, self.iter.iter) + } +} + +impl<I> DoubleEndedIterator for Unique<I> + where I: DoubleEndedIterator, + I::Item: Eq + Hash + Clone +{ + fn next_back(&mut self) -> Option<Self::Item> { + while let Some(v) = self.iter.iter.next_back() { + if let Entry::Vacant(entry) = self.iter.used.entry(v) { + let elt = entry.key().clone(); + entry.insert(()); + return Some(elt); + } + } + None + } +} + +impl<I> FusedIterator for Unique<I> + where I: FusedIterator, + I::Item: Eq + Hash + Clone +{} + +/// An iterator adapter to filter out duplicate elements. +/// +/// See [`.unique()`](crate::Itertools::unique) for more information. +#[derive(Clone)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +pub struct Unique<I: Iterator> { + iter: UniqueBy<I, I::Item, ()>, +} + +impl<I> fmt::Debug for Unique<I> + where I: Iterator + fmt::Debug, + I::Item: Hash + Eq + fmt::Debug, +{ + debug_fmt_fields!(Unique, iter); +} + +pub fn unique<I>(iter: I) -> Unique<I> + where I: Iterator, + I::Item: Eq + Hash, +{ + Unique { + iter: UniqueBy { + iter, + used: HashMap::new(), + f: (), + } + } +} |