summaryrefslogtreecommitdiffstats
path: root/vendor/chalk-ir-0.80.0/src/fold
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:02:58 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 12:02:58 +0000
commit698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree173a775858bd501c378080a10dca74132f05bc50 /vendor/chalk-ir-0.80.0/src/fold
parentInitial commit. (diff)
downloadrustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip
Adding upstream version 1.64.0+dfsg1.upstream/1.64.0+dfsg1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/chalk-ir-0.80.0/src/fold')
-rw-r--r--vendor/chalk-ir-0.80.0/src/fold/binder_impls.rs78
-rw-r--r--vendor/chalk-ir-0.80.0/src/fold/boring_impls.rs256
-rw-r--r--vendor/chalk-ir-0.80.0/src/fold/in_place.rs263
-rw-r--r--vendor/chalk-ir-0.80.0/src/fold/shift.rs185
-rw-r--r--vendor/chalk-ir-0.80.0/src/fold/subst.rs123
5 files changed, 905 insertions, 0 deletions
diff --git a/vendor/chalk-ir-0.80.0/src/fold/binder_impls.rs b/vendor/chalk-ir-0.80.0/src/fold/binder_impls.rs
new file mode 100644
index 000000000..baa912499
--- /dev/null
+++ b/vendor/chalk-ir-0.80.0/src/fold/binder_impls.rs
@@ -0,0 +1,78 @@
+//! This module contains impls of `Fold` for those types that
+//! introduce binders.
+//!
+//! The more interesting impls of `Fold` remain in the `fold` module.
+
+use crate::*;
+
+impl<I: Interner> Fold<I> for FnPointer<I> {
+ type Result = FnPointer<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let FnPointer {
+ num_binders,
+ substitution,
+ sig,
+ } = self;
+ Ok(FnPointer {
+ num_binders,
+ substitution: substitution.fold_with(folder, outer_binder.shifted_in())?,
+ sig: FnSig {
+ abi: sig.abi,
+ safety: sig.safety,
+ variadic: sig.variadic,
+ },
+ })
+ }
+}
+
+impl<T, I: Interner> Fold<I> for Binders<T>
+where
+ T: HasInterner<Interner = I> + Fold<I>,
+ <T as Fold<I>>::Result: HasInterner<Interner = I>,
+ I: Interner,
+{
+ type Result = Binders<T::Result>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let Binders {
+ binders: self_binders,
+ value: self_value,
+ } = self;
+ let value = self_value.fold_with(folder, outer_binder.shifted_in())?;
+ let binders = VariableKinds {
+ interned: self_binders.interned().clone(),
+ };
+ Ok(Binders::new(binders, value))
+ }
+}
+
+impl<I, T> Fold<I> for Canonical<T>
+where
+ I: Interner,
+ T: HasInterner<Interner = I> + Fold<I>,
+ <T as Fold<I>>::Result: HasInterner<Interner = I>,
+{
+ type Result = Canonical<T::Result>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let Canonical {
+ binders: self_binders,
+ value: self_value,
+ } = self;
+ let value = self_value.fold_with(folder, outer_binder.shifted_in())?;
+ let binders = CanonicalVarKinds {
+ interned: self_binders.interned().clone(),
+ };
+ Ok(Canonical { binders, value })
+ }
+}
diff --git a/vendor/chalk-ir-0.80.0/src/fold/boring_impls.rs b/vendor/chalk-ir-0.80.0/src/fold/boring_impls.rs
new file mode 100644
index 000000000..9210ecac8
--- /dev/null
+++ b/vendor/chalk-ir-0.80.0/src/fold/boring_impls.rs
@@ -0,0 +1,256 @@
+//! This module contains "rote and uninteresting" impls of `Fold` for
+//! various types. In general, we prefer to derive `Fold`, but
+//! sometimes that doesn't work for whatever reason.
+//!
+//! The more interesting impls of `Fold` remain in the `fold` module.
+
+use super::in_place;
+use crate::*;
+use std::marker::PhantomData;
+
+impl<T: Fold<I>, I: Interner> Fold<I> for Vec<T> {
+ type Result = Vec<T::Result>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ in_place::fallible_map_vec(self, |e| e.fold_with(folder, outer_binder))
+ }
+}
+
+impl<T: Fold<I>, I: Interner> Fold<I> for Box<T> {
+ type Result = Box<T::Result>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ in_place::fallible_map_box(self, |e| e.fold_with(folder, outer_binder))
+ }
+}
+
+macro_rules! tuple_fold {
+ ($($n:ident),*) => {
+ impl<$($n: Fold<I>,)* I: Interner> Fold<I> for ($($n,)*) {
+ type Result = ($($n::Result,)*);
+ fn fold_with<Error>(self, folder: &mut dyn Folder<I, Error = Error>, outer_binder: DebruijnIndex) -> Result<Self::Result, Error>
+ {
+ #[allow(non_snake_case)]
+ let ($($n),*) = self;
+ Ok(($($n.fold_with(folder, outer_binder)?,)*))
+ }
+ }
+ }
+}
+
+tuple_fold!(A, B);
+tuple_fold!(A, B, C);
+tuple_fold!(A, B, C, D);
+tuple_fold!(A, B, C, D, E);
+
+impl<T: Fold<I>, I: Interner> Fold<I> for Option<T> {
+ type Result = Option<T::Result>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ match self {
+ None => Ok(None),
+ Some(e) => Ok(Some(e.fold_with(folder, outer_binder)?)),
+ }
+ }
+}
+
+impl<I: Interner> Fold<I> for GenericArg<I> {
+ type Result = GenericArg<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let interner = folder.interner();
+
+ let data = self
+ .data(interner)
+ .clone()
+ .fold_with(folder, outer_binder)?;
+ Ok(GenericArg::new(interner, data))
+ }
+}
+
+impl<I: Interner> Fold<I> for Substitution<I> {
+ type Result = Substitution<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let interner = folder.interner();
+
+ let folded = self
+ .iter(interner)
+ .cloned()
+ .map(|p| p.fold_with(folder, outer_binder));
+ Substitution::from_fallible(interner, folded)
+ }
+}
+
+impl<I: Interner> Fold<I> for Goals<I> {
+ type Result = Goals<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let interner = folder.interner();
+ let folded = self
+ .iter(interner)
+ .cloned()
+ .map(|p| p.fold_with(folder, outer_binder));
+ Goals::from_fallible(interner, folded)
+ }
+}
+
+impl<I: Interner> Fold<I> for ProgramClauses<I> {
+ type Result = ProgramClauses<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let interner = folder.interner();
+ let folded = self
+ .iter(interner)
+ .cloned()
+ .map(|p| p.fold_with(folder, outer_binder));
+ ProgramClauses::from_fallible(interner, folded)
+ }
+}
+
+impl<I: Interner> Fold<I> for QuantifiedWhereClauses<I> {
+ type Result = QuantifiedWhereClauses<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let interner = folder.interner();
+ let folded = self
+ .iter(interner)
+ .cloned()
+ .map(|p| p.fold_with(folder, outer_binder));
+ QuantifiedWhereClauses::from_fallible(interner, folded)
+ }
+}
+
+impl<I: Interner> Fold<I> for Constraints<I> {
+ type Result = Constraints<I>;
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self::Result, E> {
+ let interner = folder.interner();
+ let folded = self
+ .iter(interner)
+ .cloned()
+ .map(|p| p.fold_with(folder, outer_binder));
+ Constraints::from_fallible(interner, folded)
+ }
+}
+
+#[doc(hidden)]
+#[macro_export]
+macro_rules! copy_fold {
+ ($t:ty) => {
+ impl<I: Interner> $crate::fold::Fold<I> for $t {
+ type Result = Self;
+ fn fold_with<E>(
+ self,
+ _folder: &mut dyn ($crate::fold::Folder<I, Error = E>),
+ _outer_binder: DebruijnIndex,
+ ) -> ::std::result::Result<Self::Result, E> {
+ Ok(self)
+ }
+ }
+ };
+}
+
+copy_fold!(bool);
+copy_fold!(usize);
+copy_fold!(UniverseIndex);
+copy_fold!(PlaceholderIndex);
+copy_fold!(QuantifierKind);
+copy_fold!(DebruijnIndex);
+copy_fold!(());
+copy_fold!(UintTy);
+copy_fold!(IntTy);
+copy_fold!(FloatTy);
+copy_fold!(Scalar);
+copy_fold!(ClausePriority);
+copy_fold!(Mutability);
+copy_fold!(Safety);
+
+#[doc(hidden)]
+#[macro_export]
+macro_rules! id_fold {
+ ($t:ident) => {
+ impl<I: Interner> $crate::fold::Fold<I> for $t<I> {
+ type Result = $t<I>;
+ fn fold_with<E>(
+ self,
+ _folder: &mut dyn ($crate::fold::Folder<I, Error = E>),
+ _outer_binder: DebruijnIndex,
+ ) -> ::std::result::Result<Self::Result, E> {
+ Ok(self)
+ }
+ }
+ };
+}
+
+id_fold!(ImplId);
+id_fold!(AdtId);
+id_fold!(TraitId);
+id_fold!(AssocTypeId);
+id_fold!(OpaqueTyId);
+id_fold!(FnDefId);
+id_fold!(ClosureId);
+id_fold!(GeneratorId);
+id_fold!(ForeignDefId);
+
+impl<I: Interner> SuperFold<I> for ProgramClauseData<I> {
+ fn super_fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> ::std::result::Result<Self::Result, E> {
+ Ok(ProgramClauseData(self.0.fold_with(folder, outer_binder)?))
+ }
+}
+
+impl<I: Interner> SuperFold<I> for ProgramClause<I> {
+ fn super_fold_with<E>(
+ self,
+ folder: &mut dyn Folder<I, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> ::std::result::Result<Self::Result, E> {
+ let clause = self.data(folder.interner()).clone();
+ Ok(clause
+ .super_fold_with(folder, outer_binder)?
+ .intern(folder.interner()))
+ }
+}
+
+impl<I: Interner> Fold<I> for PhantomData<I> {
+ type Result = PhantomData<I>;
+
+ fn fold_with<E>(
+ self,
+ _folder: &mut dyn Folder<I, Error = E>,
+ _outer_binder: DebruijnIndex,
+ ) -> ::std::result::Result<Self::Result, E> {
+ Ok(PhantomData)
+ }
+}
diff --git a/vendor/chalk-ir-0.80.0/src/fold/in_place.rs b/vendor/chalk-ir-0.80.0/src/fold/in_place.rs
new file mode 100644
index 000000000..c81d9113a
--- /dev/null
+++ b/vendor/chalk-ir-0.80.0/src/fold/in_place.rs
@@ -0,0 +1,263 @@
+//! Subroutines to help implementers of `Fold` avoid unnecessary heap allocations.
+
+use std::marker::PhantomData;
+use std::{mem, ptr};
+
+fn is_zst<T>() -> bool {
+ mem::size_of::<T>() == 0
+}
+
+fn is_layout_identical<T, U>() -> bool {
+ mem::size_of::<T>() == mem::size_of::<U>() && mem::align_of::<T>() == mem::align_of::<U>()
+}
+
+/// Maps a `Box<T>` to a `Box<U>`, reusing the underlying storage if possible.
+pub(super) fn fallible_map_box<T, U, E>(
+ b: Box<T>,
+ map: impl FnOnce(T) -> Result<U, E>,
+) -> Result<Box<U>, E> {
+ // This optimization is only valid when `T` and `U` have the same size/alignment and is not
+ // useful for ZSTs.
+ if !is_layout_identical::<T, U>() || is_zst::<T>() {
+ return map(*b).map(Box::new);
+ }
+
+ let raw = Box::into_raw(b);
+ unsafe {
+ let val = ptr::read(raw);
+
+ // Box<T> -> Box<MaybeUninit<U>>
+ let mut raw: Box<mem::MaybeUninit<U>> = Box::from_raw(raw.cast());
+
+ // If `map` panics or returns an error, `raw` will free the memory associated with `b`, but
+ // not drop the boxed value itself since it is wrapped in `MaybeUninit`. This is what we
+ // want since the boxed value was moved into `map`.
+ let mapped_val = map(val)?;
+ ptr::write(raw.as_mut_ptr(), mapped_val);
+
+ // Box<MaybeUninit<U>> -> Box<U>
+ Ok(Box::from_raw(Box::into_raw(raw).cast()))
+ }
+}
+
+/// Maps a `Vec<T>` to a `Vec<U>`, reusing the underlying storage if possible.
+pub(super) fn fallible_map_vec<T, U, E>(
+ vec: Vec<T>,
+ mut map: impl FnMut(T) -> Result<U, E>,
+) -> Result<Vec<U>, E> {
+ // This optimization is only valid when `T` and `U` have the same size/alignment and is not
+ // useful for ZSTs.
+ if !is_layout_identical::<T, U>() || is_zst::<T>() {
+ return vec.into_iter().map(map).collect();
+ }
+
+ let mut vec = VecMappedInPlace::<T, U>::new(vec);
+
+ unsafe {
+ for i in 0..vec.len {
+ let place = vec.ptr.add(i);
+ let val = ptr::read(place);
+
+ // Set `map_in_progress` so the drop impl for `VecMappedInPlace` can handle the other
+ // elements correctly in case `map` panics or returns an error.
+ vec.map_in_progress = i;
+ let mapped_val = map(val)?;
+
+ ptr::write(place as *mut U, mapped_val);
+ }
+
+ Ok(vec.finish())
+ }
+}
+
+/// Takes ownership of a `Vec` that is being mapped in place, cleaning up if the map fails.
+struct VecMappedInPlace<T, U> {
+ ptr: *mut T,
+ len: usize,
+ cap: usize,
+
+ map_in_progress: usize,
+ _elem_tys: PhantomData<(T, U)>,
+}
+
+impl<T, U> VecMappedInPlace<T, U> {
+ fn new(mut vec: Vec<T>) -> Self {
+ assert!(is_layout_identical::<T, U>());
+
+ // FIXME: This is just `Vec::into_raw_parts`. Use that instead when it is stabilized.
+ let ptr = vec.as_mut_ptr();
+ let len = vec.len();
+ let cap = vec.capacity();
+ mem::forget(vec);
+
+ VecMappedInPlace {
+ ptr,
+ len,
+ cap,
+
+ map_in_progress: 0,
+ _elem_tys: PhantomData,
+ }
+ }
+
+ /// Converts back into a `Vec` once the map is complete.
+ unsafe fn finish(self) -> Vec<U> {
+ let this = mem::ManuallyDrop::new(self);
+ Vec::from_raw_parts(this.ptr as *mut U, this.len, this.cap)
+ }
+}
+
+/// `VecMappedInPlace` drops everything but the element that was passed to `map` when it panicked or
+/// returned an error. Everything before that index in the vector has type `U` (it has been mapped)
+/// and everything after it has type `T` (it has not been mapped).
+///
+/// ```text
+/// mapped
+/// | not yet mapped
+/// |----| |-----|
+/// [UUUU UxTT TTTT]
+/// ^
+/// `map_in_progress` (not dropped)
+/// ```
+impl<T, U> Drop for VecMappedInPlace<T, U> {
+ fn drop(&mut self) {
+ // Drop mapped elements of type `U`.
+ for i in 0..self.map_in_progress {
+ unsafe {
+ ptr::drop_in_place(self.ptr.add(i) as *mut U);
+ }
+ }
+
+ // Drop unmapped elements of type `T`.
+ for i in (self.map_in_progress + 1)..self.len {
+ unsafe {
+ ptr::drop_in_place(self.ptr.add(i));
+ }
+ }
+
+ // Free the underlying storage for the `Vec`.
+ // `len` is 0 because the elements were handled above.
+ unsafe {
+ Vec::from_raw_parts(self.ptr, 0, self.cap);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::fmt;
+ use std::sync::{Arc, Mutex};
+
+ /// A wrapper around `T` that records when it is dropped.
+ struct RecordDrop<T: fmt::Display> {
+ id: T,
+ drops: Arc<Mutex<Vec<String>>>,
+ }
+
+ impl<T: fmt::Display> RecordDrop<T> {
+ fn new(id: T, drops: &Arc<Mutex<Vec<String>>>) -> Self {
+ RecordDrop {
+ id,
+ drops: drops.clone(),
+ }
+ }
+ }
+
+ impl RecordDrop<u8> {
+ fn map_to_char(self) -> RecordDrop<char> {
+ let this = std::mem::ManuallyDrop::new(self);
+ RecordDrop {
+ id: (this.id + b'A') as char,
+ drops: this.drops.clone(),
+ }
+ }
+ }
+
+ impl<T: fmt::Display> Drop for RecordDrop<T> {
+ fn drop(&mut self) {
+ self.drops.lock().unwrap().push(format!("{}", self.id));
+ }
+ }
+
+ #[test]
+ fn vec_no_cleanup_after_success() {
+ let drops = Arc::new(Mutex::new(Vec::new()));
+ let to_fold = (0u8..5).map(|i| RecordDrop::new(i, &drops)).collect();
+
+ let res: Result<_, ()> = super::fallible_map_vec(to_fold, |x| Ok(x.map_to_char()));
+
+ assert!(res.is_ok());
+ assert!(drops.lock().unwrap().is_empty());
+ }
+
+ #[test]
+ fn vec_cleanup_after_panic() {
+ let drops = Arc::new(Mutex::new(Vec::new()));
+ let to_fold = (0u8..5).map(|i| RecordDrop::new(i, &drops)).collect();
+
+ let res = std::panic::catch_unwind(|| {
+ let _: Result<_, ()> = super::fallible_map_vec(to_fold, |x| {
+ if x.id == 3 {
+ panic!();
+ }
+
+ Ok(x.map_to_char())
+ });
+ });
+
+ assert!(res.is_err());
+ assert_eq!(*drops.lock().unwrap(), &["3", "A", "B", "C", "4"]);
+ }
+
+ #[test]
+ fn vec_cleanup_after_early_return() {
+ let drops = Arc::new(Mutex::new(Vec::new()));
+ let to_fold = (0u8..5).map(|i| RecordDrop::new(i, &drops)).collect();
+
+ let res = super::fallible_map_vec(to_fold, |x| {
+ if x.id == 2 {
+ return Err(());
+ }
+
+ Ok(x.map_to_char())
+ });
+
+ assert!(res.is_err());
+ assert_eq!(*drops.lock().unwrap(), &["2", "A", "B", "3", "4"]);
+ }
+
+ #[test]
+ fn box_no_cleanup_after_success() {
+ let drops = Arc::new(Mutex::new(Vec::new()));
+ let to_fold = Box::new(RecordDrop::new(0, &drops));
+
+ let res: Result<Box<_>, ()> = super::fallible_map_box(to_fold, |x| Ok(x.map_to_char()));
+
+ assert!(res.is_ok());
+ assert!(drops.lock().unwrap().is_empty());
+ }
+
+ #[test]
+ fn box_cleanup_after_panic() {
+ let drops = Arc::new(Mutex::new(Vec::new()));
+ let to_fold = Box::new(RecordDrop::new(0, &drops));
+
+ let res = std::panic::catch_unwind(|| {
+ let _: Result<Box<()>, ()> = super::fallible_map_box(to_fold, |_| panic!());
+ });
+
+ assert!(res.is_err());
+ assert_eq!(*drops.lock().unwrap(), &["0"]);
+ }
+
+ #[test]
+ fn box_cleanup_after_early_return() {
+ let drops = Arc::new(Mutex::new(Vec::new()));
+ let to_fold = Box::new(RecordDrop::new(0, &drops));
+
+ let res: Result<Box<()>, _> = super::fallible_map_box(to_fold, |_| Err(()));
+
+ assert!(res.is_err());
+ assert_eq!(*drops.lock().unwrap(), &["0"]);
+ }
+}
diff --git a/vendor/chalk-ir-0.80.0/src/fold/shift.rs b/vendor/chalk-ir-0.80.0/src/fold/shift.rs
new file mode 100644
index 000000000..2ea957b28
--- /dev/null
+++ b/vendor/chalk-ir-0.80.0/src/fold/shift.rs
@@ -0,0 +1,185 @@
+//! Shifting of debruijn indices
+
+use super::Fold;
+use crate::*;
+
+/// Methods for converting debruijn indices to move values into or out
+/// of binders.
+pub trait Shift<I: Interner>: Fold<I> {
+ /// Shifts this term in one level of binders.
+ fn shifted_in(self, interner: I) -> Self::Result;
+
+ /// Shifts a term valid at `outer_binder` so that it is
+ /// valid at the innermost binder. See [`DebruijnIndex::shifted_in_from`]
+ /// for a detailed explanation.
+ fn shifted_in_from(self, interner: I, source_binder: DebruijnIndex) -> Self::Result;
+
+ /// Shifts this term out one level of binders.
+ fn shifted_out(self, interner: I) -> Fallible<Self::Result>;
+
+ /// Shifts a term valid at the innermost binder so that it is
+ /// valid at `outer_binder`. See [`DebruijnIndex::shifted_out_to`]
+ /// for a detailed explanation.
+ fn shifted_out_to(self, interner: I, target_binder: DebruijnIndex) -> Fallible<Self::Result>;
+}
+
+impl<T: Fold<I>, I: Interner> Shift<I> for T {
+ fn shifted_in(self, interner: I) -> Self::Result {
+ self.shifted_in_from(interner, DebruijnIndex::ONE)
+ }
+
+ fn shifted_in_from(self, interner: I, source_binder: DebruijnIndex) -> T::Result {
+ self.fold_with(
+ &mut Shifter {
+ source_binder,
+ interner,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ .unwrap()
+ }
+
+ fn shifted_out_to(self, interner: I, target_binder: DebruijnIndex) -> Fallible<T::Result> {
+ self.fold_with(
+ &mut DownShifter {
+ target_binder,
+ interner,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+
+ fn shifted_out(self, interner: I) -> Fallible<Self::Result> {
+ self.shifted_out_to(interner, DebruijnIndex::ONE)
+ }
+}
+
+/// A folder that adjusts debruijn indices by a certain amount.
+struct Shifter<I> {
+ source_binder: DebruijnIndex,
+ interner: I,
+}
+
+impl<I> Shifter<I> {
+    /// Given a free variable, shifts it in by `self.source_binder`
+    /// levels of binders, and then shifts it in again by `outer_binder`
+    /// so that the result is valid within the current internal binders.
+ fn adjust(&self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> BoundVar {
+ bound_var
+ .shifted_in_from(self.source_binder)
+ .shifted_in_from(outer_binder)
+ }
+}
+
+impl<I: Interner> Folder<I> for Shifter<I> {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn Folder<I, Error = Self::Error> {
+ self
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty<I>> {
+ Ok(TyKind::<I>::BoundVar(self.adjust(bound_var, outer_binder)).intern(self.interner()))
+ }
+
+ fn fold_free_var_lifetime(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime<I>> {
+ Ok(
+ LifetimeData::<I>::BoundVar(self.adjust(bound_var, outer_binder))
+ .intern(self.interner()),
+ )
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty<I>,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const<I>> {
+ // const types don't have free variables, so we can skip folding `ty`
+ Ok(self
+ .adjust(bound_var, outer_binder)
+ .to_const(self.interner(), ty))
+ }
+
+ fn interner(&self) -> I {
+ self.interner
+ }
+}
+
+//---------------------------------------------------------------------------
+
+/// A shifter that reduces debruijn indices -- in other words, which lifts a value
+/// *out* from binders. Fails with `NoSolution` if the value refers to a
+/// variable bound by one of the binders being exited.
+struct DownShifter<I> {
+ target_binder: DebruijnIndex,
+ interner: I,
+}
+
+impl<I> DownShifter<I> {
+    /// Given a reference to a free variable appearing within
+    /// `outer_binder` internal binders, attempts to lift that free
+    /// variable out past `self.target_binder` levels of binders
+    /// (i.e., convert it to a correspondingly shallower depth). If
+    /// the free variable is bound by one of the binders being
+    /// exited (`shifted_out_to` returns `None`), this fails with
+    /// `Err(NoSolution)`. Otherwise, returns the variable shifted
+    /// back in from `outer_binder` so it remains valid here.
+ fn adjust(&self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Fallible<BoundVar> {
+ match bound_var.shifted_out_to(self.target_binder) {
+ Some(bound_var1) => Ok(bound_var1.shifted_in_from(outer_binder)),
+ None => Err(NoSolution),
+ }
+ }
+}
+
+impl<I: Interner> Folder<I> for DownShifter<I> {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn Folder<I, Error = Self::Error> {
+ self
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty<I>> {
+ Ok(TyKind::<I>::BoundVar(self.adjust(bound_var, outer_binder)?).intern(self.interner()))
+ }
+
+ fn fold_free_var_lifetime(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime<I>> {
+ Ok(
+ LifetimeData::<I>::BoundVar(self.adjust(bound_var, outer_binder)?)
+ .intern(self.interner()),
+ )
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty<I>,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const<I>> {
+ // const types don't have free variables, so we can skip folding `ty`
+ Ok(self
+ .adjust(bound_var, outer_binder)?
+ .to_const(self.interner(), ty))
+ }
+
+ fn interner(&self) -> I {
+ self.interner
+ }
+}
diff --git a/vendor/chalk-ir-0.80.0/src/fold/subst.rs b/vendor/chalk-ir-0.80.0/src/fold/subst.rs
new file mode 100644
index 000000000..f7a0b2d69
--- /dev/null
+++ b/vendor/chalk-ir-0.80.0/src/fold/subst.rs
@@ -0,0 +1,123 @@
+use super::*;
+use crate::fold::shift::Shift;
+
+/// Substitution used during folding
+pub struct Subst<'s, I: Interner> {
+ /// Values to substitute. A reference to a free variable with
+ /// index `i` will be mapped to `parameters[i]` -- if `i >
+ /// parameters.len()`, then we will leave the variable untouched.
+ parameters: &'s [GenericArg<I>],
+ interner: I,
+}
+
+impl<I: Interner> Subst<'_, I> {
+ /// Applies the substitution by folding
+ pub fn apply<T: Fold<I>>(interner: I, parameters: &[GenericArg<I>], value: T) -> T::Result {
+ value
+ .fold_with(
+ &mut Subst {
+ parameters,
+ interner,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ .unwrap()
+ }
+}
+
+impl<I: Interner> Folder<I> for Subst<'_, I> {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn Folder<I, Error = Self::Error> {
+ self
+ }
+
+ /// We are eliminating one binder, but binders outside of that get preserved.
+ ///
+ /// So e.g. consider this:
+ ///
+ /// ```notrust
+ /// for<A, B> { for<C> { [A, C] } }
+    /// // ^ the binder we are substituting with `[u32]`
+ /// ```
+ ///
+ /// Here, `A` would be `^1.0` and `C` would be `^0.0`. We will replace `^0.0` with the
+ /// 0th index from the list (`u32`). We will convert `^1.0` (A) to `^0.0` -- i.e., shift
+ /// it **out** of one level of binder (the `for<C>` binder we are eliminating).
+ ///
+ /// This gives us as a result:
+ ///
+ /// ```notrust
+ /// for<A, B> { [A, u32] }
+ /// ^ represented as `^0.0`
+ /// ```
+ fn fold_free_var_ty(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty<I>> {
+ if let Some(index) = bound_var.index_if_innermost() {
+ match self.parameters[index].data(self.interner()) {
+ GenericArgData::Ty(t) => {
+ Ok(t.clone().shifted_in_from(self.interner(), outer_binder))
+ }
+ _ => panic!("mismatched kinds in substitution"),
+ }
+ } else {
+ Ok(bound_var
+ .shifted_out()
+ .expect("cannot fail because this is not the innermost")
+ .shifted_in_from(outer_binder)
+ .to_ty(self.interner()))
+ }
+ }
+
+ /// see `fold_free_var_ty`
+ fn fold_free_var_lifetime(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime<I>> {
+ if let Some(index) = bound_var.index_if_innermost() {
+ match self.parameters[index].data(self.interner()) {
+ GenericArgData::Lifetime(l) => {
+ Ok(l.clone().shifted_in_from(self.interner(), outer_binder))
+ }
+ _ => panic!("mismatched kinds in substitution"),
+ }
+ } else {
+ Ok(bound_var
+ .shifted_out()
+ .unwrap()
+ .shifted_in_from(outer_binder)
+ .to_lifetime(self.interner()))
+ }
+ }
+
+ /// see `fold_free_var_ty`
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty<I>,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const<I>> {
+ if let Some(index) = bound_var.index_if_innermost() {
+ match self.parameters[index].data(self.interner()) {
+ GenericArgData::Const(c) => {
+ Ok(c.clone().shifted_in_from(self.interner(), outer_binder))
+ }
+ _ => panic!("mismatched kinds in substitution"),
+ }
+ } else {
+ Ok(bound_var
+ .shifted_out()
+ .unwrap()
+ .shifted_in_from(outer_binder)
+ .to_const(self.interner(), ty))
+ }
+ }
+
+ fn interner(&self) -> I {
+ self.interner
+ }
+}