author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:35 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:35 +0000
commit     d1b2d29528b7794b41e66fc2136e395a02f8529b (patch)
tree       a4a17504b260206dec3cf55b2dca82929a348ac2 /library/core/src/iter/adapters
parent     Releasing progress-linux version 1.72.1+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.73.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'library/core/src/iter/adapters')
-rw-r--r--  library/core/src/iter/adapters/flatten.rs        7
-rw-r--r--  library/core/src/iter/adapters/map_windows.rs  293
-rw-r--r--  library/core/src/iter/adapters/mod.rs            4
3 files changed, 298 insertions, 6 deletions
diff --git a/library/core/src/iter/adapters/flatten.rs b/library/core/src/iter/adapters/flatten.rs
index d3e454563..eee6e5bcc 100644
--- a/library/core/src/iter/adapters/flatten.rs
+++ b/library/core/src/iter/adapters/flatten.rs
@@ -310,7 +310,7 @@ where
/// Real logic of both `Flatten` and `FlatMap` which simply delegate to
/// this type.
#[derive(Clone, Debug)]
-#[cfg_attr(bootstrap, unstable(feature = "trusted_len", issue = "37572"))]
+#[unstable(feature = "trusted_len", issue = "37572")]
struct FlattenCompat<I, U> {
iter: Fuse<I>,
frontiter: Option<U>,
@@ -464,7 +464,6 @@ where
}
}
-#[cfg_attr(bootstrap, unstable(feature = "trusted_len", issue = "37572"))]
impl<I, U> Iterator for FlattenCompat<I, U>
where
I: Iterator<Item: IntoIterator<IntoIter = U, Item = U::Item>>,
@@ -579,7 +578,6 @@ where
}
}
-#[cfg_attr(bootstrap, unstable(feature = "trusted_len", issue = "37572"))]
impl<I, U> DoubleEndedIterator for FlattenCompat<I, U>
where
I: DoubleEndedIterator<Item: IntoIterator<IntoIter = U, Item = U::Item>>,
@@ -649,7 +647,6 @@ where
}
}
-#[cfg_attr(bootstrap, unstable(feature = "trusted_len", issue = "37572"))]
unsafe impl<const N: usize, I, T> TrustedLen
for FlattenCompat<I, <[T; N] as IntoIterator>::IntoIter>
where
@@ -657,7 +654,6 @@ where
{
}
-#[cfg_attr(bootstrap, unstable(feature = "trusted_len", issue = "37572"))]
unsafe impl<'a, const N: usize, I, T> TrustedLen
for FlattenCompat<I, <&'a [T; N] as IntoIterator>::IntoIter>
where
@@ -665,7 +661,6 @@ where
{
}
-#[cfg_attr(bootstrap, unstable(feature = "trusted_len", issue = "37572"))]
unsafe impl<'a, const N: usize, I, T> TrustedLen
for FlattenCompat<I, <&'a mut [T; N] as IntoIterator>::IntoIter>
where
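
Background for the flatten.rs hunk above: `#[cfg_attr(pred, attr)]` applies `attr` only when the cfg predicate `pred` holds, and `bootstrap` is the cfg that rustc's build system sets while this library is being compiled by the previous-stage (bootstrap) compiler. The hunk drops those conditional `unstable` markers, keeping a plain `#[unstable]` only on the `FlattenCompat` struct itself. A minimal, self-contained sketch of the `cfg_attr` mechanism (illustrative only; it uses `test` as the predicate because `bootstrap` exists only inside rustc's own build):

    // Hypothetical example, not part of the diff: the inner attribute is
    // applied only when the cfg predicate holds. Here `#[must_use]` is active
    // only in `cfg(test)` builds, just as `unstable(...)` above was active
    // only in `cfg(bootstrap)` builds.
    #[cfg_attr(test, must_use)]
    fn answer() -> i32 {
        42
    }

    fn main() {
        let _ = answer();
    }
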
diff --git a/library/core/src/iter/adapters/map_windows.rs b/library/core/src/iter/adapters/map_windows.rs
new file mode 100644
index 000000000..3c0e80b25
--- /dev/null
+++ b/library/core/src/iter/adapters/map_windows.rs
@@ -0,0 +1,293 @@
+use crate::{
+ fmt,
+ iter::{ExactSizeIterator, FusedIterator},
+ mem::{self, MaybeUninit},
+ ptr,
+};
+
+/// An iterator over the mapped windows of another iterator.
+///
+/// This `struct` is created by the [`Iterator::map_windows`] method. See its
+/// documentation for more information.
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+pub struct MapWindows<I: Iterator, F, const N: usize> {
+ f: F,
+ inner: MapWindowsInner<I, N>,
+}
+
+struct MapWindowsInner<I: Iterator, const N: usize> {
+ // We fuse the inner iterator because there shouldn't be "holes" in
+ // the sliding window. Once the iterator returns a `None`, we make
+ // our `MapWindows` iterator return `None` forever.
+ iter: Option<I>,
+ // Iterators are lazy, i.e. they only yield an item when
+ // `Iterator::next()` is called, and `MapWindows` is no exception.
+ //
+ // Before the first iteration, the buffer is kept as `None`. When the user
+ // first calls `next` (or another method that advances the iterator),
+ // we collect the first `N` items yielded by the inner iterator and
+ // put them into the buffer.
+ //
+ // When the inner iterator has returned a `None` (i.e. fused), we take
+ // away this `buffer` and leave it `None` to reclaim its resources.
+ //
+ // FIXME: should we shrink the size of `buffer` using niche optimization?
+ buffer: Option<Buffer<I::Item, N>>,
+}
+
+// `Buffer` uses twice the space to reduce moves between iterations.
+// `Buffer<T, N>` is semantically `[MaybeUninit<T>; 2 * N]`. However, due
+// to limitations of const generics, we use this different type. Note that
+// it has the same underlying memory layout.
+struct Buffer<T, const N: usize> {
+ // Invariant: `self.buffer[self.start..self.start + N]` is initialized,
+ // with all other elements being uninitialized. This also
+ // implies that `self.start <= N`.
+ buffer: [[MaybeUninit<T>; N]; 2],
+ start: usize,
+}
+
+impl<I: Iterator, F, const N: usize> MapWindows<I, F, N> {
+ pub(in crate::iter) fn new(iter: I, f: F) -> Self {
+ assert!(N != 0, "array in `Iterator::map_windows` must contain more than 0 elements");
+
+ // Only arrays of ZSTs can have a length large enough for `2 * N` to overflow.
+ if mem::size_of::<I::Item>() == 0 {
+ assert!(
+ N.checked_mul(2).is_some(),
+ "array size of `Iterator::map_windows` is too large"
+ );
+ }
+
+ Self { inner: MapWindowsInner::new(iter), f }
+ }
+}
+
+impl<I: Iterator, const N: usize> MapWindowsInner<I, N> {
+ #[inline]
+ fn new(iter: I) -> Self {
+ Self { iter: Some(iter), buffer: None }
+ }
+
+ fn next_window(&mut self) -> Option<&[I::Item; N]> {
+ let iter = self.iter.as_mut()?;
+ match self.buffer {
+ // This is the first advance. We collect the first `N` items
+ // from `self.iter` to initialize `self.buffer`.
+ None => self.buffer = Buffer::try_from_iter(iter),
+ Some(ref mut buffer) => match iter.next() {
+ None => {
+ // Fuse the inner iterator since it yielded a `None`.
+ self.iter.take();
+ self.buffer.take();
+ }
+ // Advance the iterator. We first call `next` before changing our buffer
+ // at all. This means that if `next` panics, our invariant is upheld and
+ // our `Drop` impl drops the correct elements.
+ Some(item) => buffer.push(item),
+ },
+ }
+ self.buffer.as_ref().map(Buffer::as_array_ref)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let Some(ref iter) = self.iter else { return (0, Some(0)) };
+ let (lo, hi) = iter.size_hint();
+ if self.buffer.is_some() {
+ // If the first `N` items have already been yielded by the inner iterator,
+ // the size hint is equal to that of the inner iterator.
+ (lo, hi)
+ } else {
+ // If the first `N` items have not yet been yielded by the inner iterator,
+ // the first `N` elements count as a single window, so both bounds
+ // are reduced by `N - 1`.
+ (lo.saturating_sub(N - 1), hi.map(|hi| hi.saturating_sub(N - 1)))
+ }
+ }
+}
+
+impl<T, const N: usize> Buffer<T, N> {
+ fn try_from_iter(iter: &mut impl Iterator<Item = T>) -> Option<Self> {
+ let first_half = crate::array::iter_next_chunk(iter).ok()?;
+ let buffer = [MaybeUninit::new(first_half).transpose(), MaybeUninit::uninit_array()];
+ Some(Self { buffer, start: 0 })
+ }
+
+ #[inline]
+ fn buffer_ptr(&self) -> *const MaybeUninit<T> {
+ self.buffer.as_ptr().cast()
+ }
+
+ #[inline]
+ fn buffer_mut_ptr(&mut self) -> *mut MaybeUninit<T> {
+ self.buffer.as_mut_ptr().cast()
+ }
+
+ #[inline]
+ fn as_array_ref(&self) -> &[T; N] {
+ debug_assert!(self.start + N <= 2 * N);
+
+ // SAFETY: our invariant guarantees these elements are initialized.
+ unsafe { &*self.buffer_ptr().add(self.start).cast() }
+ }
+
+ #[inline]
+ fn as_uninit_array_mut(&mut self) -> &mut MaybeUninit<[T; N]> {
+ debug_assert!(self.start + N <= 2 * N);
+
+ // SAFETY: our invariant guarantees these elements are in bounds.
+ unsafe { &mut *self.buffer_mut_ptr().add(self.start).cast() }
+ }
+
+ /// Pushes a new item `next` to the back, and pops the front-most one.
+ ///
+ /// All elements are shifted to the front of the buffer when pushing
+ /// reaches the back end.
+ fn push(&mut self, next: T) {
+ let buffer_mut_ptr = self.buffer_mut_ptr();
+ debug_assert!(self.start + N <= 2 * N);
+
+ let to_drop = if self.start == N {
+ // We have reached the end of our buffer and have to copy
+ // everything to the start. Example layout for N = 3.
+ //
+ // 0 1 2 3 4 5 0 1 2 3 4 5
+ // ┌───┬───┬───┬───┬───┬───┐ ┌───┬───┬───┬───┬───┬───┐
+ // │ - │ - │ - │ a │ b │ c │ -> │ b │ c │ n │ - │ - │ - │
+ // └───┴───┴───┴───┴───┴───┘ └───┴───┴───┴───┴───┴───┘
+ // ↑ ↑
+ // start start
+
+ // SAFETY: the two pointers are valid for reads/writes of N - 1
+ // elements because our array's size is semantically 2 * N. The
+ // regions also don't overlap for the same reason.
+ //
+ // We leave the old elements in place. As soon as `start` is set
+ // to 0, we treat them as uninitialized and treat their copies
+ // as initialized.
+ let to_drop = unsafe {
+ ptr::copy_nonoverlapping(buffer_mut_ptr.add(self.start + 1), buffer_mut_ptr, N - 1);
+ (*buffer_mut_ptr.add(N - 1)).write(next);
+ buffer_mut_ptr.add(self.start)
+ };
+ self.start = 0;
+ to_drop
+ } else {
+ // SAFETY: `self.start` is < N as guaranteed by the invariant
+ // plus the check above. Even if the drop at the end panics,
+ // the invariant is upheld.
+ //
+ // Example layout for N = 3:
+ //
+ // 0 1 2 3 4 5 0 1 2 3 4 5
+ // ┌───┬───┬───┬───┬───┬───┐ ┌───┬───┬───┬───┬───┬───┐
+ // │ - │ a │ b │ c │ - │ - │ -> │ - │ - │ b │ c │ n │ - │
+ // └───┴───┴───┴───┴───┴───┘ └───┴───┴───┴───┴───┴───┘
+ // ↑ ↑
+ // start start
+ //
+ let to_drop = unsafe {
+ (*buffer_mut_ptr.add(self.start + N)).write(next);
+ buffer_mut_ptr.add(self.start)
+ };
+ self.start += 1;
+ to_drop
+ };
+
+ // SAFETY: the index is valid and this is element `a` in the
+ // diagram above and has not been dropped yet.
+ unsafe { ptr::drop_in_place(to_drop.cast::<T>()) };
+ }
+}
+
+impl<T: Clone, const N: usize> Clone for Buffer<T, N> {
+ fn clone(&self) -> Self {
+ let mut buffer = Buffer {
+ buffer: [MaybeUninit::uninit_array(), MaybeUninit::uninit_array()],
+ start: self.start,
+ };
+ buffer.as_uninit_array_mut().write(self.as_array_ref().clone());
+ buffer
+ }
+}
+
+impl<I, const N: usize> Clone for MapWindowsInner<I, N>
+where
+ I: Iterator + Clone,
+ I::Item: Clone,
+{
+ fn clone(&self) -> Self {
+ Self { iter: self.iter.clone(), buffer: self.buffer.clone() }
+ }
+}
+
+impl<T, const N: usize> Drop for Buffer<T, N> {
+ fn drop(&mut self) {
+ // SAFETY: our invariant guarantees that N elements starting from
+ // `self.start` are initialized. We drop them here.
+ unsafe {
+ let initialized_part: *mut [T] = crate::ptr::slice_from_raw_parts_mut(
+ self.buffer_mut_ptr().add(self.start).cast(),
+ N,
+ );
+ ptr::drop_in_place(initialized_part);
+ }
+ }
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, R, const N: usize> Iterator for MapWindows<I, F, N>
+where
+ I: Iterator,
+ F: FnMut(&[I::Item; N]) -> R,
+{
+ type Item = R;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let window = self.inner.next_window()?;
+ let out = (self.f)(window);
+ Some(out)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+// Note that even if the inner iterator is not fused, `MapWindows` is still fused,
+// because we don't allow "holes" in the sliding window.
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, R, const N: usize> FusedIterator for MapWindows<I, F, N>
+where
+ I: Iterator,
+ F: FnMut(&[I::Item; N]) -> R,
+{
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, R, const N: usize> ExactSizeIterator for MapWindows<I, F, N>
+where
+ I: ExactSizeIterator,
+ F: FnMut(&[I::Item; N]) -> R,
+{
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I: Iterator + fmt::Debug, F, const N: usize> fmt::Debug for MapWindows<I, F, N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("MapWindows").field("iter", &self.inner.iter).finish()
+ }
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, const N: usize> Clone for MapWindows<I, F, N>
+where
+ I: Iterator + Clone,
+ F: Clone,
+ I::Item: Clone,
+{
+ fn clone(&self) -> Self {
+ Self { f: self.f.clone(), inner: self.inner.clone() }
+ }
+}
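
The `Buffer` type introduced above reserves space for `2 * N` elements so that most calls to `push` write a single slot and bump `start`; only when `start` reaches `N` does it copy the live window back to the front. Below is a simplified, safe sketch of that sliding technique (an illustration under assumptions, not the std implementation: it uses a `Vec` instead of `[[MaybeUninit<T>; N]; 2]`, and it keeps popped front elements alive until the next shift instead of dropping them eagerly):

    // `SlidingWindow` is a hypothetical name used only for this sketch.
    struct SlidingWindow<T, const N: usize> {
        buf: Vec<T>, // at most 2 * N elements; the window is buf[start..start + N]
        start: usize,
    }

    impl<T, const N: usize> SlidingWindow<T, N> {
        fn new(first: [T; N]) -> Self {
            let mut buf = Vec::with_capacity(2 * N);
            buf.extend(first);
            Self { buf, start: 0 }
        }

        fn push(&mut self, next: T) {
            if self.start == N {
                // The window occupies the second half of the buffer: remove the
                // stale front half plus the outgoing element so the remaining
                // `N - 1` elements shift to the front. This O(N) step happens
                // only once every N pushes.
                self.buf.drain(..self.start + 1);
                self.start = 0;
            } else {
                self.start += 1;
            }
            self.buf.push(next);
        }

        fn window(&self) -> &[T] {
            &self.buf[self.start..self.start + N]
        }
    }

    fn main() {
        let mut w: SlidingWindow<i32, 3> = SlidingWindow::new([1, 2, 3]);
        for x in 4..=7 {
            w.push(x);
            println!("{:?}", w.window()); // [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7]
        }
    }
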
diff --git a/library/core/src/iter/adapters/mod.rs b/library/core/src/iter/adapters/mod.rs
index 8cc2b7cec..6f4fa7010 100644
--- a/library/core/src/iter/adapters/mod.rs
+++ b/library/core/src/iter/adapters/mod.rs
@@ -16,6 +16,7 @@ mod inspect;
mod intersperse;
mod map;
mod map_while;
+mod map_windows;
mod peekable;
mod rev;
mod scan;
@@ -57,6 +58,9 @@ pub use self::intersperse::{Intersperse, IntersperseWith};
#[stable(feature = "iter_map_while", since = "1.57.0")]
pub use self::map_while::MapWhile;
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+pub use self::map_windows::MapWindows;
+
#[unstable(feature = "trusted_random_access", issue = "none")]
pub use self::zip::TrustedRandomAccess;
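
With the module wired up in mod.rs, the adapter is reachable as `Iterator::map_windows` behind the unstable `iter_map_windows` feature (tracking issue #87155). A brief usage sketch; it needs a nightly toolchain with the feature gate enabled:

    #![feature(iter_map_windows)]

    fn main() {
        // Maps every overlapping window of 2 characters; `N` is inferred from
        // the closure's array pattern.
        let pairs: Vec<String> = "abcd"
            .chars()
            .map_windows(|[a, b]| format!("{a}{b}"))
            .collect();
        assert_eq!(pairs, ["ab", "bc", "cd"]);

        // An iterator yielding `k` items produces `k - (N - 1)` windows (and
        // none at all if `k < N`), which is what `MapWindowsInner::size_hint`
        // accounts for before the buffer has been filled.
        assert_eq!((1..=4).map_windows(|[a, b, c]| a + b + c).count(), 2);
    }
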