author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-19 09:26:03 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-19 09:26:03 +0000
commit     9918693037dce8aa4bb6f08741b6812923486c18 (patch)
tree       21d2b40bec7e6a7ea664acee056eb3d08e15a1cf /vendor/hashbrown/src
parent     Releasing progress-linux version 1.75.0+dfsg1-5~progress7.99u1. (diff)
download   rustc-9918693037dce8aa4bb6f08741b6812923486c18.tar.xz
           rustc-9918693037dce8aa4bb6f08741b6812923486c18.zip
Merging upstream version 1.76.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/hashbrown/src')
-rw-r--r--  vendor/hashbrown/src/map.rs      78
-rw-r--r--  vendor/hashbrown/src/raw/mod.rs  98
-rw-r--r--  vendor/hashbrown/src/set.rs      68
-rw-r--r--  vendor/hashbrown/src/table.rs    38
4 files changed, 274 insertions(+), 8 deletions(-)
diff --git a/vendor/hashbrown/src/map.rs b/vendor/hashbrown/src/map.rs
index b5e657bc6..9cd768f20 100644
--- a/vendor/hashbrown/src/map.rs
+++ b/vendor/hashbrown/src/map.rs
@@ -2469,6 +2469,14 @@ impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[inline]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, (k, _)| f(acc, k))
+ }
}
impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A> {
@@ -2531,6 +2539,14 @@ impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[inline]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, (_, v)| f(acc, v))
+ }
}
impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A> {
@@ -4722,6 +4738,17 @@ impl<'a, K, V> Iterator for Iter<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, x| unsafe {
+ let (k, v) = x.as_ref();
+ f(acc, (k, v))
+ })
+ }
}
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -4750,6 +4777,17 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, x| unsafe {
+ let (k, v) = x.as_mut();
+ f(acc, (k, v))
+ })
+ }
}
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -4780,6 +4818,14 @@ impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, f)
+ }
}
impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -4810,6 +4856,14 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, (k, _)| f(acc, k))
+ }
}
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -4834,6 +4888,14 @@ impl<'a, K, V> Iterator for Values<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, (_, v)| f(acc, v))
+ }
}
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -4858,6 +4920,14 @@ impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, |acc, (_, v)| f(acc, v))
+ }
}
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -4886,6 +4956,14 @@ impl<'a, K, V, A: Allocator> Iterator for Drain<'a, K, V, A> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, f)
+ }
}
impl<K, V, A: Allocator> ExactSizeIterator for Drain<'_, K, V, A> {
#[cfg_attr(feature = "inline-more", inline)]
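
A brief usage sketch of what the map.rs additions buy (the helper below is illustrative, not part of hashbrown): iterator consumers built on `fold`, such as `sum`, `for_each`, or an explicit `fold`, now run through the specialized implementations above instead of repeated `next()` calls.

use hashbrown::HashMap;

// Hypothetical helper over a HashMap<String, u64>.
fn totals(map: &HashMap<String, u64>) -> (u64, usize) {
    // `values().sum()` is fold-driven, so it reaches `Values::fold`.
    let value_total: u64 = map.values().sum();
    // An explicit fold over the keys exercises `Keys::fold` directly.
    let key_bytes = map.keys().fold(0usize, |acc, k| acc + k.len());
    (value_total, key_bytes)
}
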
diff --git a/vendor/hashbrown/src/raw/mod.rs b/vendor/hashbrown/src/raw/mod.rs
index 25c5d1c4d..711fb89d0 100644
--- a/vendor/hashbrown/src/raw/mod.rs
+++ b/vendor/hashbrown/src/raw/mod.rs
@@ -57,14 +57,12 @@ use core::convert::identity as unlikely;
#[cfg(feature = "nightly")]
use core::intrinsics::{likely, unlikely};
-// Use strict provenance functions if available.
-#[cfg(feature = "nightly")]
-use core::ptr::invalid_mut;
-// Implement it with a cast otherwise.
-#[cfg(not(feature = "nightly"))]
+// FIXME: use strict provenance functions once they are stable.
+// Implement it with a transmute for now.
#[inline(always)]
+#[allow(clippy::useless_transmute)] // clippy is wrong, cast and transmute are different here
fn invalid_mut<T>(addr: usize) -> *mut T {
- addr as *mut T
+ unsafe { core::mem::transmute(addr) }
}
#[inline]
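
For context, a minimal sketch of how a helper like `invalid_mut` is typically used (the `dangling_for` function below is illustrative, not a hashbrown call site): it manufactures a well-aligned, non-null pointer from a bare address without ever touching real memory, e.g. for an empty table.

// Stable-Rust stand-in for `core::ptr::invalid_mut`, as in the hunk above.
fn invalid_mut<T>(addr: usize) -> *mut T {
    unsafe { core::mem::transmute(addr) }
}

// Hypothetical use: an aligned, non-null pointer that is never dereferenced.
fn dangling_for<T>() -> *mut T {
    invalid_mut::<T>(core::mem::align_of::<T>())
}
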
@@ -3846,6 +3844,85 @@ impl<T> RawIterRange<T> {
self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
}
}
+
+ /// Folds every element into an accumulator by applying an operation,
+ /// returning the final result.
+ ///
+ /// `fold_impl()` takes three arguments: the number of items remaining in
+ /// the iterator, an initial value, and a closure with two arguments: an
+ /// 'accumulator', and an element. The closure returns the value that the
+ /// accumulator should have for the next iteration.
+ ///
+ /// The initial value is the value the accumulator will have on the first call.
+ ///
+ /// After applying this closure to every element of the iterator, `fold_impl()`
+ /// returns the accumulator.
+ ///
+ /// # Safety
+ ///
+ /// If any of the following conditions are violated, the result is
+ /// [`Undefined Behavior`]:
+ ///
+ /// * The [`RawTableInner`] / [`RawTable`] must be alive and not moved,
+ /// i.e. the table must outlive the `RawIterRange`;
+ ///
+ /// * The provided `n` value must match the actual number of items
+ /// in the table.
+ ///
+ /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+ #[allow(clippy::while_let_on_iterator)]
+ #[cfg_attr(feature = "inline-more", inline)]
+ unsafe fn fold_impl<F, B>(mut self, mut n: usize, mut acc: B, mut f: F) -> B
+ where
+ F: FnMut(B, Bucket<T>) -> B,
+ {
+ loop {
+ while let Some(index) = self.current_group.next() {
+ // The returned `index` will always be in the range `0..Group::WIDTH`,
+ // so that calling `self.data.next_n(index)` is safe (see detailed explanation below).
+ debug_assert!(n != 0);
+ let bucket = self.data.next_n(index);
+ acc = f(acc, bucket);
+ n -= 1;
+ }
+
+ if n == 0 {
+ return acc;
+ }
+
+ // SAFETY: The caller of this function ensures that:
+ //
+ // 1. The provided `n` value matches the actual number of items in the table;
+ // 2. The table is alive and has not been moved.
+ //
+ // Taking the above into account, we always stay within the bounds, because:
+ //
+ // 1. For tables smaller than the group width (self.buckets() <= Group::WIDTH),
+ //    we will never end up in the given branch, since we should have already
+ //    yielded all the elements of the table.
+ //
+ // 2. For tables larger than the group width: the number of buckets is a
+ //    power of two (2 ^ n) and Group::WIDTH is also a power of two (2 ^ k). Since
+ //    `(2 ^ n) > (2 ^ k)`, it follows that `(2 ^ n) % (2 ^ k) = 0`. As we start from
+ //    the start of the array of control bytes, and never try to iterate after
+ //    getting all the elements, the last `self.current_group` will read bytes
+ //    from the `self.buckets() - Group::WIDTH` index. We also know that
+ //    `self.current_group.next()` will always return indices within the range
+ //    `0..Group::WIDTH`.
+ //
+ // Knowing all of the above and taking into account that we are synchronizing
+ // the `self.data` index with the index we used to read the `self.current_group`,
+ // the subsequent `self.data.next_n(index)` will always return a bucket with
+ // an index number less than `self.buckets()`.
+ //
+ // The last `self.next_ctrl`, whose index would be `self.buckets()`, will never
+ // actually be read, since we should have already yielded all the elements of
+ // the table.
+ self.current_group = Group::load_aligned(self.next_ctrl).match_full().into_iter();
+ self.data = self.data.next_n(Group::WIDTH);
+ self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
+ }
+ }
}
// We make raw iterators unconditionally Send and Sync, and let the PhantomData
@@ -4069,6 +4146,15 @@ impl<T> Iterator for RawIter<T> {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.items, Some(self.items))
}
+
+ #[inline]
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ unsafe { self.iter.fold_impl(self.items, init, f) }
+ }
}
impl<T> ExactSizeIterator for RawIter<T> {}
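
The control-byte walk in `fold_impl` can be hard to follow from the diff alone. Below is a simplified, safe sketch of the same idea, with made-up names (`GROUP_WIDTH`, `fold_full_slots`) and a plain byte scan standing in for the SIMD group loads; it is not hashbrown code, only an illustration of the group-by-group, count-limited fold.

const GROUP_WIDTH: usize = 16; // stand-in for Group::WIDTH

// Fold over the values of full slots, group by group, stopping once `n`
// items have been yielded (mirroring fold_impl's reliance on `n` being exact).
fn fold_full_slots<T, B>(ctrl: &[u8], data: &[T], n: usize, init: B, mut f: impl FnMut(B, &T) -> B) -> B {
    let mut acc = init;
    let mut remaining = n;
    let mut group_start = 0;
    loop {
        // A full slot has the high bit of its control byte cleared.
        for (i, &byte) in ctrl[group_start..group_start + GROUP_WIDTH].iter().enumerate() {
            if byte & 0x80 == 0 {
                acc = f(acc, &data[group_start + i]);
                remaining -= 1;
            }
        }
        if remaining == 0 {
            return acc;
        }
        // Advance to the next group of control bytes and buckets in lockstep.
        group_start += GROUP_WIDTH;
    }
}
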
diff --git a/vendor/hashbrown/src/set.rs b/vendor/hashbrown/src/set.rs
index 09b45fd9f..fdff46d88 100644
--- a/vendor/hashbrown/src/set.rs
+++ b/vendor/hashbrown/src/set.rs
@@ -1696,6 +1696,14 @@ impl<'a, K> Iterator for Iter<'a, K> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, f)
+ }
}
impl<'a, K> ExactSizeIterator for Iter<'a, K> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -1726,6 +1734,14 @@ impl<K, A: Allocator> Iterator for IntoIter<K, A> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, |acc, (k, ())| f(acc, k))
+ }
}
impl<K, A: Allocator> ExactSizeIterator for IntoIter<K, A> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -1757,6 +1773,14 @@ impl<K, A: Allocator> Iterator for Drain<'_, K, A> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, |acc, (k, ())| f(acc, k))
+ }
}
impl<K, A: Allocator> ExactSizeIterator for Drain<'_, K, A> {
#[cfg_attr(feature = "inline-more", inline)]
@@ -1827,6 +1851,20 @@ where
let (_, upper) = self.iter.size_hint();
(0, upper)
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, |acc, elt| {
+ if self.other.contains(elt) {
+ f(acc, elt)
+ } else {
+ acc
+ }
+ })
+ }
}
impl<T, S, A> fmt::Debug for Intersection<'_, T, S, A>
@@ -1881,6 +1919,20 @@ where
let (_, upper) = self.iter.size_hint();
(0, upper)
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, |acc, elt| {
+ if self.other.contains(elt) {
+ acc
+ } else {
+ f(acc, elt)
+ }
+ })
+ }
}
impl<T, S, A> FusedIterator for Difference<'_, T, S, A>
@@ -1927,6 +1979,14 @@ where
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, f)
+ }
}
impl<T, S, A> FusedIterator for SymmetricDifference<'_, T, S, A>
@@ -1992,6 +2052,14 @@ where
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
+ #[cfg_attr(feature = "inline-more", inline)]
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.iter.fold(init, f)
+ }
}
/// A view into a single entry in a set, which may either be vacant or occupied.
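
As with the map iterators, a short usage sketch for the set adapters above (the helper is illustrative, not part of hashbrown): fold-based consumers such as `count` and `sum` over `intersection`/`difference` now reach the specialized `fold` implementations.

use hashbrown::HashSet;

// Hypothetical helper over two HashSet<u32> values.
fn overlap_stats(a: &HashSet<u32>, b: &HashSet<u32>) -> (usize, u32) {
    // `count` defaults to a fold, so this goes through `Intersection::fold`.
    let shared = a.intersection(b).count();
    // `sum` is likewise fold-driven, exercising `Difference::fold`.
    let only_in_a: u32 = a.difference(b).sum();
    (shared, only_in_a)
}
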
diff --git a/vendor/hashbrown/src/table.rs b/vendor/hashbrown/src/table.rs
index bfb5dd989..a7bb5fcc9 100644
--- a/vendor/hashbrown/src/table.rs
+++ b/vendor/hashbrown/src/table.rs
@@ -1861,12 +1861,25 @@ impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|bucket| unsafe { bucket.as_ref() })
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some(bucket) => Some(unsafe { bucket.as_ref() }),
+ None => None,
+ }
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner
+ .fold(init, |acc, bucket| unsafe { f(acc, bucket.as_ref()) })
+ }
}
impl<T> ExactSizeIterator for Iter<'_, T> {
@@ -1894,12 +1907,25 @@ impl<'a, T> Iterator for IterMut<'a, T> {
type Item = &'a mut T;
fn next(&mut self) -> Option<Self::Item> {
- self.inner.next().map(|bucket| unsafe { bucket.as_mut() })
+ // Avoid `Option::map` because it bloats LLVM IR.
+ match self.inner.next() {
+ Some(bucket) => Some(unsafe { bucket.as_mut() }),
+ None => None,
+ }
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+
+ fn fold<B, F>(self, init: B, mut f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner
+ .fold(init, |acc, bucket| unsafe { f(acc, bucket.as_mut()) })
+ }
}
impl<T> ExactSizeIterator for IterMut<'_, T> {
@@ -1940,6 +1966,14 @@ where
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
+
+ fn fold<B, F>(self, init: B, f: F) -> B
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> B,
+ {
+ self.inner.fold(init, f)
+ }
}
impl<T, A> ExactSizeIterator for IntoIter<T, A>
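
Finally, a minimal illustration (not hashbrown code) of the `Option::map` versus explicit `match` rewrite in table.rs above: the two forms behave identically; the diff prefers the `match` because `Option::map` tends to expand into more LLVM IR.

// Equivalent behavior, different codegen footprint.
fn first_len_with_map(items: &[String]) -> Option<usize> {
    items.first().map(|s| s.len())
}

fn first_len_with_match(items: &[String]) -> Option<usize> {
    match items.first() {
        Some(s) => Some(s.len()),
        None => None,
    }
}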