author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:18:32 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:18:32 +0000
commit     4547b622d8d29df964fa2914213088b148c498fc (patch)
tree       9fc6b25f3c3add6b745be9a2400a6e96140046e9 /vendor/chalk-ir/src/fold
parent     Releasing progress-linux version 1.66.0+dfsg1-1~progress7.99u1. (diff)
download   rustc-4547b622d8d29df964fa2914213088b148c498fc.tar.xz
           rustc-4547b622d8d29df964fa2914213088b148c498fc.zip
Merging upstream version 1.67.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/chalk-ir/src/fold')
-rw-r--r--   vendor/chalk-ir/src/fold/binder_impls.rs    73
-rw-r--r--   vendor/chalk-ir/src/fold/boring_impls.rs   244
-rw-r--r--   vendor/chalk-ir/src/fold/in_place.rs       263
-rw-r--r--   vendor/chalk-ir/src/fold/shift.rs          177
-rw-r--r--   vendor/chalk-ir/src/fold/subst.rs          118
5 files changed, 875 insertions, 0 deletions
diff --git a/vendor/chalk-ir/src/fold/binder_impls.rs b/vendor/chalk-ir/src/fold/binder_impls.rs
new file mode 100644
index 000000000..1f44c1629
--- /dev/null
+++ b/vendor/chalk-ir/src/fold/binder_impls.rs
@@ -0,0 +1,73 @@
+//! This module contains impls of `TypeFoldable` for those types that
+//! introduce binders.
+//!
+//! The more interesting impls of `TypeFoldable` remain in the `fold` module.
+
+use crate::*;
+
+impl<I: Interner> TypeFoldable<I> for FnPointer<I> {
+    fn try_fold_with<E>(
+        self,
+        folder: &mut dyn FallibleTypeFolder<I, Error = E>,
+        outer_binder: DebruijnIndex,
+    ) -> Result<Self, E> {
+        let FnPointer {
+            num_binders,
+            substitution,
+            sig,
+        } = self;
+        Ok(FnPointer {
+            num_binders,
+            substitution: substitution.try_fold_with(folder, outer_binder.shifted_in())?,
+            sig: FnSig {
+                abi: sig.abi,
+                safety: sig.safety,
+                variadic: sig.variadic,
+            },
+        })
+    }
+}
+
+impl<T, I: Interner> TypeFoldable<I> for Binders<T>
+where
+    T: HasInterner<Interner = I> + TypeFoldable<I>,
+    I: Interner,
+{
+    fn try_fold_with<E>(
+        self,
+        folder: &mut dyn FallibleTypeFolder<I, Error = E>,
+        outer_binder: DebruijnIndex,
+    ) -> Result<Self, E> {
+        let Binders {
+            binders: self_binders,
+            value: self_value,
+        } = self;
+        let value = self_value.try_fold_with(folder, outer_binder.shifted_in())?;
+        let binders = VariableKinds {
+            interned: self_binders.interned().clone(),
+        };
+        Ok(Binders::new(binders, value))
+    }
+}
+
+impl<I, T> TypeFoldable<I> for Canonical<T>
+where
+    I: Interner,
+    T: HasInterner<Interner = I> + TypeFoldable<I>,
+{
+    fn try_fold_with<E>(
+        self,
+        folder: &mut dyn FallibleTypeFolder<I, Error = E>,
+        outer_binder: DebruijnIndex,
+    ) -> Result<Self, E> {
+        let Canonical {
+            binders: self_binders,
+            value: self_value,
+        } = self;
+        let value = self_value.try_fold_with(folder, outer_binder.shifted_in())?;
+        let binders = CanonicalVarKinds {
+            interned: self_binders.interned().clone(),
+        };
+        Ok(Canonical { binders, value })
+    }
+}
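The three impls above share one pattern: when the folder recurses into a value that sits under a binder, it passes `outer_binder.shifted_in()`, recording that one more binder now separates the folder from where it started. A standalone sketch of that de Bruijn bookkeeping on a toy term type (only an illustration; this is not chalk-ir's API):

// Toy de Bruijn terms: Var(i) refers to the i-th enclosing binder.
#[derive(Debug, Clone)]
enum Term {
    Var(usize),
    Lam(Box<Term>),
}

// Rewrite every variable that is free relative to `depth` (the number of
// binders passed under so far). This mirrors how `try_fold_with` threads
// `outer_binder` and shifts it in when entering `Binders`/`Canonical`.
fn map_free_vars(t: Term, depth: usize, f: &mut impl FnMut(usize) -> usize) -> Term {
    match t {
        Term::Var(i) if i >= depth => Term::Var(f(i - depth) + depth),
        Term::Var(i) => Term::Var(i),
        // Entering a binder: one more level now lies between us and the root,
        // so the depth ("outer binder") is shifted in by one.
        Term::Lam(body) => Term::Lam(Box::new(map_free_vars(*body, depth + 1, f))),
    }
}

fn main() {
    // λ. Var(1): Var(1) is free, since it points past the single binder.
    let t = Term::Lam(Box::new(Term::Var(1)));
    // Shift every free variable up by one, as if wrapping `t` in a new binder.
    let shifted = map_free_vars(t, 0, &mut |free| free + 1);
    println!("{:?}", shifted); // Lam(Var(2))
}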
tuple_fold { + ($($n:ident),*) => { + impl<$($n: TypeFoldable<I>,)* I: Interner> TypeFoldable<I> for ($($n,)*) { + fn try_fold_with<Error>(self, folder: &mut dyn FallibleTypeFolder<I, Error = Error>, outer_binder: DebruijnIndex) -> Result<Self, Error> + { + #[allow(non_snake_case)] + let ($($n),*) = self; + Ok(($($n.try_fold_with(folder, outer_binder)?,)*)) + } + } + } +} + +tuple_fold!(A, B); +tuple_fold!(A, B, C); +tuple_fold!(A, B, C, D); +tuple_fold!(A, B, C, D, E); + +impl<T: TypeFoldable<I>, I: Interner> TypeFoldable<I> for Option<T> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + match self { + None => Ok(None), + Some(e) => Ok(Some(e.try_fold_with(folder, outer_binder)?)), + } + } +} + +impl<I: Interner> TypeFoldable<I> for GenericArg<I> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + let interner = folder.interner(); + + let data = self + .data(interner) + .clone() + .try_fold_with(folder, outer_binder)?; + Ok(GenericArg::new(interner, data)) + } +} + +impl<I: Interner> TypeFoldable<I> for Substitution<I> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + let interner = folder.interner(); + + let folded = self + .iter(interner) + .cloned() + .map(|p| p.try_fold_with(folder, outer_binder)); + Substitution::from_fallible(interner, folded) + } +} + +impl<I: Interner> TypeFoldable<I> for Goals<I> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + let interner = folder.interner(); + let folded = self + .iter(interner) + .cloned() + .map(|p| p.try_fold_with(folder, outer_binder)); + Goals::from_fallible(interner, folded) + } +} + +impl<I: Interner> TypeFoldable<I> for ProgramClauses<I> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + let interner = folder.interner(); + let folded = self + .iter(interner) + .cloned() + .map(|p| p.try_fold_with(folder, outer_binder)); + ProgramClauses::from_fallible(interner, folded) + } +} + +impl<I: Interner> TypeFoldable<I> for QuantifiedWhereClauses<I> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + let interner = folder.interner(); + let folded = self + .iter(interner) + .cloned() + .map(|p| p.try_fold_with(folder, outer_binder)); + QuantifiedWhereClauses::from_fallible(interner, folded) + } +} + +impl<I: Interner> TypeFoldable<I> for Constraints<I> { + fn try_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> Result<Self, E> { + let interner = folder.interner(); + let folded = self + .iter(interner) + .cloned() + .map(|p| p.try_fold_with(folder, outer_binder)); + Constraints::from_fallible(interner, folded) + } +} + +#[doc(hidden)] +#[macro_export] +macro_rules! 
copy_fold { + ($t:ty) => { + impl<I: Interner> $crate::fold::TypeFoldable<I> for $t { + fn try_fold_with<E>( + self, + _folder: &mut dyn ($crate::fold::FallibleTypeFolder<I, Error = E>), + _outer_binder: DebruijnIndex, + ) -> ::std::result::Result<Self, E> { + Ok(self) + } + } + }; +} + +copy_fold!(bool); +copy_fold!(usize); +copy_fold!(UniverseIndex); +copy_fold!(PlaceholderIndex); +copy_fold!(QuantifierKind); +copy_fold!(DebruijnIndex); +copy_fold!(()); +copy_fold!(UintTy); +copy_fold!(IntTy); +copy_fold!(FloatTy); +copy_fold!(Scalar); +copy_fold!(ClausePriority); +copy_fold!(Mutability); +copy_fold!(Safety); + +#[doc(hidden)] +#[macro_export] +macro_rules! id_fold { + ($t:ident) => { + impl<I: Interner> $crate::fold::TypeFoldable<I> for $t<I> { + fn try_fold_with<E>( + self, + _folder: &mut dyn ($crate::fold::FallibleTypeFolder<I, Error = E>), + _outer_binder: DebruijnIndex, + ) -> ::std::result::Result<Self, E> { + Ok(self) + } + } + }; +} + +id_fold!(ImplId); +id_fold!(AdtId); +id_fold!(TraitId); +id_fold!(AssocTypeId); +id_fold!(OpaqueTyId); +id_fold!(FnDefId); +id_fold!(ClosureId); +id_fold!(GeneratorId); +id_fold!(ForeignDefId); + +impl<I: Interner> TypeSuperFoldable<I> for ProgramClauseData<I> { + fn try_super_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> ::std::result::Result<Self, E> { + Ok(ProgramClauseData( + self.0.try_fold_with(folder, outer_binder)?, + )) + } +} + +impl<I: Interner> TypeSuperFoldable<I> for ProgramClause<I> { + fn try_super_fold_with<E>( + self, + folder: &mut dyn FallibleTypeFolder<I, Error = E>, + outer_binder: DebruijnIndex, + ) -> ::std::result::Result<Self, E> { + let clause = self.data(folder.interner()).clone(); + Ok(clause + .try_super_fold_with(folder, outer_binder)? + .intern(folder.interner())) + } +} + +impl<I: Interner> TypeFoldable<I> for PhantomData<I> { + fn try_fold_with<E>( + self, + _folder: &mut dyn FallibleTypeFolder<I, Error = E>, + _outer_binder: DebruijnIndex, + ) -> ::std::result::Result<Self, E> { + Ok(PhantomData) + } +} diff --git a/vendor/chalk-ir/src/fold/in_place.rs b/vendor/chalk-ir/src/fold/in_place.rs new file mode 100644 index 000000000..263da617d --- /dev/null +++ b/vendor/chalk-ir/src/fold/in_place.rs @@ -0,0 +1,263 @@ +//! Subroutines to help implementers of `TypeFoldable` avoid unnecessary heap allocations. + +use std::marker::PhantomData; +use std::{mem, ptr}; + +fn is_zst<T>() -> bool { + mem::size_of::<T>() == 0 +} + +fn is_layout_identical<T, U>() -> bool { + mem::size_of::<T>() == mem::size_of::<U>() && mem::align_of::<T>() == mem::align_of::<U>() +} + +/// Maps a `Box<T>` to a `Box<U>`, reusing the underlying storage if possible. +pub(super) fn fallible_map_box<T, U, E>( + b: Box<T>, + map: impl FnOnce(T) -> Result<U, E>, +) -> Result<Box<U>, E> { + // This optimization is only valid when `T` and `U` have the same size/alignment and is not + // useful for ZSTs. + if !is_layout_identical::<T, U>() || is_zst::<T>() { + return map(*b).map(Box::new); + } + + let raw = Box::into_raw(b); + unsafe { + let val = ptr::read(raw); + + // Box<T> -> Box<MaybeUninit<U>> + let mut raw: Box<mem::MaybeUninit<U>> = Box::from_raw(raw.cast()); + + // If `map` panics or returns an error, `raw` will free the memory associated with `b`, but + // not drop the boxed value itself since it is wrapped in `MaybeUninit`. This is what we + // want since the boxed value was moved into `map`. 
diff --git a/vendor/chalk-ir/src/fold/in_place.rs b/vendor/chalk-ir/src/fold/in_place.rs
new file mode 100644
index 000000000..263da617d
--- /dev/null
+++ b/vendor/chalk-ir/src/fold/in_place.rs
@@ -0,0 +1,263 @@
+//! Subroutines to help implementers of `TypeFoldable` avoid unnecessary heap allocations.
+
+use std::marker::PhantomData;
+use std::{mem, ptr};
+
+fn is_zst<T>() -> bool {
+    mem::size_of::<T>() == 0
+}
+
+fn is_layout_identical<T, U>() -> bool {
+    mem::size_of::<T>() == mem::size_of::<U>() && mem::align_of::<T>() == mem::align_of::<U>()
+}
+
+/// Maps a `Box<T>` to a `Box<U>`, reusing the underlying storage if possible.
+pub(super) fn fallible_map_box<T, U, E>(
+    b: Box<T>,
+    map: impl FnOnce(T) -> Result<U, E>,
+) -> Result<Box<U>, E> {
+    // This optimization is only valid when `T` and `U` have the same size/alignment and is not
+    // useful for ZSTs.
+    if !is_layout_identical::<T, U>() || is_zst::<T>() {
+        return map(*b).map(Box::new);
+    }
+
+    let raw = Box::into_raw(b);
+    unsafe {
+        let val = ptr::read(raw);
+
+        // Box<T> -> Box<MaybeUninit<U>>
+        let mut raw: Box<mem::MaybeUninit<U>> = Box::from_raw(raw.cast());
+
+        // If `map` panics or returns an error, `raw` will free the memory associated with `b`, but
+        // not drop the boxed value itself since it is wrapped in `MaybeUninit`. This is what we
+        // want since the boxed value was moved into `map`.
+        let mapped_val = map(val)?;
+        ptr::write(raw.as_mut_ptr(), mapped_val);
+
+        // Box<MaybeUninit<U>> -> Box<U>
+        Ok(Box::from_raw(Box::into_raw(raw).cast()))
+    }
+}
+
+/// Maps a `Vec<T>` to a `Vec<U>`, reusing the underlying storage if possible.
+pub(super) fn fallible_map_vec<T, U, E>(
+    vec: Vec<T>,
+    mut map: impl FnMut(T) -> Result<U, E>,
+) -> Result<Vec<U>, E> {
+    // This optimization is only valid when `T` and `U` have the same size/alignment and is not
+    // useful for ZSTs.
+    if !is_layout_identical::<T, U>() || is_zst::<T>() {
+        return vec.into_iter().map(map).collect();
+    }
+
+    let mut vec = VecMappedInPlace::<T, U>::new(vec);
+
+    unsafe {
+        for i in 0..vec.len {
+            let place = vec.ptr.add(i);
+            let val = ptr::read(place);
+
+            // Set `map_in_progress` so the drop impl for `VecMappedInPlace` can handle the other
+            // elements correctly in case `map` panics or returns an error.
+            vec.map_in_progress = i;
+            let mapped_val = map(val)?;
+
+            ptr::write(place as *mut U, mapped_val);
+        }
+
+        Ok(vec.finish())
+    }
+}
+
+/// Takes ownership of a `Vec` that is being mapped in place, cleaning up if the map fails.
+struct VecMappedInPlace<T, U> {
+    ptr: *mut T,
+    len: usize,
+    cap: usize,
+
+    map_in_progress: usize,
+    _elem_tys: PhantomData<(T, U)>,
+}
+
+impl<T, U> VecMappedInPlace<T, U> {
+    fn new(mut vec: Vec<T>) -> Self {
+        assert!(is_layout_identical::<T, U>());
+
+        // FIXME: This is just `Vec::into_raw_parts`. Use that instead when it is stabilized.
+        let ptr = vec.as_mut_ptr();
+        let len = vec.len();
+        let cap = vec.capacity();
+        mem::forget(vec);
+
+        VecMappedInPlace {
+            ptr,
+            len,
+            cap,
+
+            map_in_progress: 0,
+            _elem_tys: PhantomData,
+        }
+    }
+
+    /// Converts back into a `Vec` once the map is complete.
+    unsafe fn finish(self) -> Vec<U> {
+        let this = mem::ManuallyDrop::new(self);
+        Vec::from_raw_parts(this.ptr as *mut U, this.len, this.cap)
+    }
+}
+
+/// `VecMappedInPlace` drops everything but the element that was passed to `map` when it panicked or
+/// returned an error. Everything before that index in the vector has type `U` (it has been mapped)
+/// and everything after it has type `T` (it has not been mapped).
+///
+/// ```text
+///  mapped
+///    |       not yet mapped
+/// |----|       |-----|
+/// [UUUU UxTT TTTT]
+///        ^
+///        `map_in_progress` (not dropped)
+/// ```
+impl<T, U> Drop for VecMappedInPlace<T, U> {
+    fn drop(&mut self) {
+        // Drop mapped elements of type `U`.
+        for i in 0..self.map_in_progress {
+            unsafe {
+                ptr::drop_in_place(self.ptr.add(i) as *mut U);
+            }
+        }
+
+        // Drop unmapped elements of type `T`.
+        for i in (self.map_in_progress + 1)..self.len {
+            unsafe {
+                ptr::drop_in_place(self.ptr.add(i));
+            }
+        }
+
+        // Free the underlying storage for the `Vec`.
+        // `len` is 0 because the elements were handled above.
+        unsafe {
+            Vec::from_raw_parts(self.ptr, 0, self.cap);
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::fmt;
+    use std::sync::{Arc, Mutex};
+
+    /// A wrapper around `T` that records when it is dropped.
+    struct RecordDrop<T: fmt::Display> {
+        id: T,
+        drops: Arc<Mutex<Vec<String>>>,
+    }
+
+    impl<T: fmt::Display> RecordDrop<T> {
+        fn new(id: T, drops: &Arc<Mutex<Vec<String>>>) -> Self {
+            RecordDrop {
+                id,
+                drops: drops.clone(),
+            }
+        }
+    }
+
+    impl RecordDrop<u8> {
+        fn map_to_char(self) -> RecordDrop<char> {
+            let this = std::mem::ManuallyDrop::new(self);
+            RecordDrop {
+                id: (this.id + b'A') as char,
+                drops: this.drops.clone(),
+            }
+        }
+    }
+
+    impl<T: fmt::Display> Drop for RecordDrop<T> {
+        fn drop(&mut self) {
+            self.drops.lock().unwrap().push(format!("{}", self.id));
+        }
+    }
+
+    #[test]
+    fn vec_no_cleanup_after_success() {
+        let drops = Arc::new(Mutex::new(Vec::new()));
+        let to_fold = (0u8..5).map(|i| RecordDrop::new(i, &drops)).collect();
+
+        let res: Result<_, ()> = super::fallible_map_vec(to_fold, |x| Ok(x.map_to_char()));
+
+        assert!(res.is_ok());
+        assert!(drops.lock().unwrap().is_empty());
+    }
+
+    #[test]
+    fn vec_cleanup_after_panic() {
+        let drops = Arc::new(Mutex::new(Vec::new()));
+        let to_fold = (0u8..5).map(|i| RecordDrop::new(i, &drops)).collect();
+
+        let res = std::panic::catch_unwind(|| {
+            let _: Result<_, ()> = super::fallible_map_vec(to_fold, |x| {
+                if x.id == 3 {
+                    panic!();
+                }
+
+                Ok(x.map_to_char())
+            });
+        });
+
+        assert!(res.is_err());
+        assert_eq!(*drops.lock().unwrap(), &["3", "A", "B", "C", "4"]);
+    }
+
+    #[test]
+    fn vec_cleanup_after_early_return() {
+        let drops = Arc::new(Mutex::new(Vec::new()));
+        let to_fold = (0u8..5).map(|i| RecordDrop::new(i, &drops)).collect();
+
+        let res = super::fallible_map_vec(to_fold, |x| {
+            if x.id == 2 {
+                return Err(());
+            }
+
+            Ok(x.map_to_char())
+        });
+
+        assert!(res.is_err());
+        assert_eq!(*drops.lock().unwrap(), &["2", "A", "B", "3", "4"]);
+    }
+
+    #[test]
+    fn box_no_cleanup_after_success() {
+        let drops = Arc::new(Mutex::new(Vec::new()));
+        let to_fold = Box::new(RecordDrop::new(0, &drops));
+
+        let res: Result<Box<_>, ()> = super::fallible_map_box(to_fold, |x| Ok(x.map_to_char()));
+
+        assert!(res.is_ok());
+        assert!(drops.lock().unwrap().is_empty());
+    }
+
+    #[test]
+    fn box_cleanup_after_panic() {
+        let drops = Arc::new(Mutex::new(Vec::new()));
+        let to_fold = Box::new(RecordDrop::new(0, &drops));
+
+        let res = std::panic::catch_unwind(|| {
+            let _: Result<Box<()>, ()> = super::fallible_map_box(to_fold, |_| panic!());
+        });
+
+        assert!(res.is_err());
+        assert_eq!(*drops.lock().unwrap(), &["0"]);
+    }
+
+    #[test]
+    fn box_cleanup_after_early_return() {
+        let drops = Arc::new(Mutex::new(Vec::new()));
+        let to_fold = Box::new(RecordDrop::new(0, &drops));
+
+        let res: Result<Box<()>, _> = super::fallible_map_box(to_fold, |_| Err(()));
+
+        assert!(res.is_err());
+        assert_eq!(*drops.lock().unwrap(), &["0"]);
+    }
+}
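The safety argument inside `fallible_map_box` hinges on one property: dropping a `Box<MaybeUninit<U>>` frees the heap allocation without running any destructor for its contents, so a value that has already been moved out into `map` is never dropped twice. A standalone illustration of that property, independent of chalk-ir's code:

use std::mem::MaybeUninit;
use std::ptr;

fn main() {
    let b: Box<String> = Box::new(String::from("hello"));
    let raw = Box::into_raw(b);

    // Move the value out of the allocation; `s` now owns the string data.
    let s: String = unsafe { ptr::read(raw) };

    // Reinterpret the same allocation as holding a MaybeUninit<String>.
    // Dropping this box frees the heap block but does NOT drop a String --
    // exactly the behaviour fallible_map_box needs if `map` panics after
    // the value has been moved out.
    let shell: Box<MaybeUninit<String>> = unsafe { Box::from_raw(raw.cast()) };
    drop(shell);

    println!("{s}"); // still valid: only moved, never double-dropped
}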
diff --git a/vendor/chalk-ir/src/fold/shift.rs b/vendor/chalk-ir/src/fold/shift.rs
new file mode 100644
index 000000000..f7e5e4a4a
--- /dev/null
+++ b/vendor/chalk-ir/src/fold/shift.rs
@@ -0,0 +1,177 @@
+//! Shifting of debruijn indices
+
+use crate::*;
+
+/// Methods for converting debruijn indices to move values into or out
+/// of binders.
+pub trait Shift<I: Interner>: TypeFoldable<I> {
+    /// Shifts this term in one level of binders.
+    fn shifted_in(self, interner: I) -> Self;
+
+    /// Shifts a term valid at `outer_binder` so that it is
+    /// valid at the innermost binder. See [`DebruijnIndex::shifted_in_from`]
+    /// for a detailed explanation.
+    fn shifted_in_from(self, interner: I, source_binder: DebruijnIndex) -> Self;
+
+    /// Shifts this term out one level of binders.
+    fn shifted_out(self, interner: I) -> Fallible<Self>;
+
+    /// Shifts a term valid at the innermost binder so that it is
+    /// valid at `outer_binder`. See [`DebruijnIndex::shifted_out_to`]
+    /// for a detailed explanation.
+    fn shifted_out_to(self, interner: I, target_binder: DebruijnIndex) -> Fallible<Self>;
+}
+
+impl<T: TypeFoldable<I>, I: Interner> Shift<I> for T {
+    fn shifted_in(self, interner: I) -> Self {
+        self.shifted_in_from(interner, DebruijnIndex::ONE)
+    }
+
+    fn shifted_in_from(self, interner: I, source_binder: DebruijnIndex) -> T {
+        self.try_fold_with(
+            &mut Shifter {
+                source_binder,
+                interner,
+            },
+            DebruijnIndex::INNERMOST,
+        )
+        .unwrap()
+    }
+
+    fn shifted_out_to(self, interner: I, target_binder: DebruijnIndex) -> Fallible<T> {
+        self.try_fold_with(
+            &mut DownShifter {
+                target_binder,
+                interner,
+            },
+            DebruijnIndex::INNERMOST,
+        )
+    }
+
+    fn shifted_out(self, interner: I) -> Fallible<Self> {
+        self.shifted_out_to(interner, DebruijnIndex::ONE)
+    }
+}
+
+/// A folder that adjusts debruijn indices by a certain amount.
+#[derive(FallibleTypeFolder)]
+struct Shifter<I: Interner> {
+    source_binder: DebruijnIndex,
+    interner: I,
+}
+
+impl<I: Interner> Shifter<I> {
+    /// Given a free variable at `depth`, shifts that depth to `depth
+    /// + self.adjustment`, and then wraps *that* within the internal
+    /// set `binders`.
+    fn adjust(&self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> BoundVar {
+        bound_var
+            .shifted_in_from(self.source_binder)
+            .shifted_in_from(outer_binder)
+    }
+}
+
+impl<I: Interner> TypeFolder<I> for Shifter<I> {
+    fn as_dyn(&mut self) -> &mut dyn TypeFolder<I> {
+        self
+    }
+
+    fn fold_free_var_ty(&mut self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Ty<I> {
+        TyKind::<I>::BoundVar(self.adjust(bound_var, outer_binder))
+            .intern(TypeFolder::interner(self))
+    }
+
+    fn fold_free_var_lifetime(
+        &mut self,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Lifetime<I> {
+        LifetimeData::<I>::BoundVar(self.adjust(bound_var, outer_binder))
+            .intern(TypeFolder::interner(self))
+    }
+
+    fn fold_free_var_const(
+        &mut self,
+        ty: Ty<I>,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Const<I> {
+        // const types don't have free variables, so we can skip folding `ty`
+        self.adjust(bound_var, outer_binder)
+            .to_const(TypeFolder::interner(self), ty)
+    }
+
+    fn interner(&self) -> I {
+        self.interner
+    }
+}
+
+//---------------------------------------------------------------------------
+
+/// A shifter that reduces debruijn indices -- in other words, which lifts a value
+/// *out* from binders. Consider this example:
+///
+struct DownShifter<I> {
+    target_binder: DebruijnIndex,
+    interner: I,
+}
+
+impl<I> DownShifter<I> {
+    /// Given a reference to a free variable at depth `depth`
+    /// (appearing within `binders` internal binders), attempts to
+    /// lift that free variable out from `adjustment` levels of
+    /// binders (i.e., convert it to depth `depth -
+    /// self.adjustment`). If the free variable is bound by one of
+    /// those internal binders (i.e., `depth < self.adjustment`) then
+    /// this will fail with `Err`. Otherwise, returns the variable at
+    /// this new depth (but adjusted to appear within `binders`).
+    fn adjust(&self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Fallible<BoundVar> {
+        match bound_var.shifted_out_to(self.target_binder) {
+            Some(bound_var1) => Ok(bound_var1.shifted_in_from(outer_binder)),
+            None => Err(NoSolution),
+        }
+    }
+}
+
+impl<I: Interner> FallibleTypeFolder<I> for DownShifter<I> {
+    type Error = NoSolution;
+
+    fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<I, Error = Self::Error> {
+        self
+    }
+
+    fn try_fold_free_var_ty(
+        &mut self,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Fallible<Ty<I>> {
+        Ok(TyKind::<I>::BoundVar(self.adjust(bound_var, outer_binder)?).intern(self.interner()))
+    }
+
+    fn try_fold_free_var_lifetime(
+        &mut self,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Fallible<Lifetime<I>> {
+        Ok(
+            LifetimeData::<I>::BoundVar(self.adjust(bound_var, outer_binder)?)
+                .intern(self.interner()),
+        )
+    }
+
+    fn try_fold_free_var_const(
+        &mut self,
+        ty: Ty<I>,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Fallible<Const<I>> {
+        // const types don't have free variables, so we can skip folding `ty`
+        Ok(self
+            .adjust(bound_var, outer_binder)?
+            .to_const(self.interner(), ty))
+    }
+
+    fn interner(&self) -> I {
+        self.interner
+    }
+}
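At the level of raw indices, shifting *in* only ever adds binder levels and cannot fail, while shifting *out* (what `DownShifter` does) must fail for a variable bound by one of the binders being removed. A self-contained sketch of that arithmetic, with plain integers standing in for `BoundVar`/`DebruijnIndex`:

// Shift a free variable further out: always succeeds.
fn shifted_in_from(index: usize, levels: usize) -> usize {
    index + levels
}

// Shift a variable out of `levels` binders: fails (None) if the variable is
// bound by one of the binders being removed, mirroring how DownShifter
// returns Err(NoSolution) for such variables.
fn shifted_out_to(index: usize, levels: usize) -> Option<usize> {
    index.checked_sub(levels)
}

fn main() {
    assert_eq!(shifted_in_from(0, 1), 1);      // moving a term under one binder
    assert_eq!(shifted_out_to(3, 1), Some(2)); // variable survives the removal
    assert_eq!(shifted_out_to(0, 1), None);    // variable was bound by the removed binder
}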
diff --git a/vendor/chalk-ir/src/fold/subst.rs b/vendor/chalk-ir/src/fold/subst.rs
new file mode 100644
index 000000000..7cff8d89c
--- /dev/null
+++ b/vendor/chalk-ir/src/fold/subst.rs
@@ -0,0 +1,118 @@
+use super::*;
+use crate::fold::shift::Shift;
+
+/// Substitution used during folding
+#[derive(FallibleTypeFolder)]
+pub struct Subst<'s, I: Interner> {
+    /// Values to substitute. A reference to a free variable with
+    /// index `i` will be mapped to `parameters[i]` -- if `i >
+    /// parameters.len()`, then we will leave the variable untouched.
+    parameters: &'s [GenericArg<I>],
+    interner: I,
+}
+
+impl<I: Interner> Subst<'_, I> {
+    /// Applies the substitution by folding
+    pub fn apply<T: TypeFoldable<I>>(interner: I, parameters: &[GenericArg<I>], value: T) -> T {
+        value
+            .try_fold_with(
+                &mut Subst {
+                    parameters,
+                    interner,
+                },
+                DebruijnIndex::INNERMOST,
+            )
+            .unwrap()
+    }
+}
+
+impl<I: Interner> TypeFolder<I> for Subst<'_, I> {
+    fn as_dyn(&mut self) -> &mut dyn TypeFolder<I> {
+        self
+    }
+
+    /// We are eliminating one binder, but binders outside of that get preserved.
+    ///
+    /// So e.g. consider this:
+    ///
+    /// ```notrust
+    /// for<A, B> { for<C> { [A, C] } }
+    /// //          ^ the binder we are substituting with `[u32]`
+    /// ```
+    ///
+    /// Here, `A` would be `^1.0` and `C` would be `^0.0`. We will replace `^0.0` with the
+    /// 0th index from the list (`u32`). We will convert `^1.0` (A) to `^0.0` -- i.e., shift
+    /// it **out** of one level of binder (the `for<C>` binder we are eliminating).
+    ///
+    /// This gives us as a result:
+    ///
+    /// ```notrust
+    /// for<A, B> { [A, u32] }
+    /// //           ^ represented as `^0.0`
+    /// ```
+    fn fold_free_var_ty(&mut self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Ty<I> {
+        if let Some(index) = bound_var.index_if_innermost() {
+            match self.parameters[index].data(TypeFolder::interner(self)) {
+                GenericArgData::Ty(t) => t
+                    .clone()
+                    .shifted_in_from(TypeFolder::interner(self), outer_binder),
+                _ => panic!("mismatched kinds in substitution"),
+            }
+        } else {
+            bound_var
+                .shifted_out()
+                .expect("cannot fail because this is not the innermost")
+                .shifted_in_from(outer_binder)
+                .to_ty(TypeFolder::interner(self))
+        }
+    }
+
+    /// see `fold_free_var_ty`
+    fn fold_free_var_lifetime(
+        &mut self,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Lifetime<I> {
+        if let Some(index) = bound_var.index_if_innermost() {
+            match self.parameters[index].data(TypeFolder::interner(self)) {
+                GenericArgData::Lifetime(l) => l
+                    .clone()
+                    .shifted_in_from(TypeFolder::interner(self), outer_binder),
+                _ => panic!("mismatched kinds in substitution"),
+            }
+        } else {
+            bound_var
+                .shifted_out()
+                .unwrap()
+                .shifted_in_from(outer_binder)
+                .to_lifetime(TypeFolder::interner(self))
+        }
+    }
+
+    /// see `fold_free_var_ty`
+    fn fold_free_var_const(
+        &mut self,
+        ty: Ty<I>,
+        bound_var: BoundVar,
+        outer_binder: DebruijnIndex,
+    ) -> Const<I> {
+        if let Some(index) = bound_var.index_if_innermost() {
+            match self.parameters[index].data(TypeFolder::interner(self)) {
+                GenericArgData::Const(c) => c
+                    .clone()
+                    .shifted_in_from(TypeFolder::interner(self), outer_binder),
+                _ => panic!("mismatched kinds in substitution"),
+            }
+        } else {
+            bound_var
+                .shifted_out()
+                .unwrap()
+                .shifted_in_from(outer_binder)
+                .to_const(TypeFolder::interner(self), ty)
+        }
+    }
+
+    fn interner(&self) -> I {
+        self.interner
+    }
+}
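The worked example in the `fold_free_var_ty` doc comment can be replayed on a toy de Bruijn term type: the variable bound by the innermost binder is replaced by the substituted parameter, and variables bound further out are shifted down by one because that binder disappears. This is a standalone sketch, not chalk-ir's `Subst`; it also ignores the shifting of the substituted parameter that the real code performs via `shifted_in_from`:

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Var(usize), // de Bruijn index counted from the innermost binder
    U32,
    Tuple(Vec<Ty>),
}

// Substitute `param` for the variable bound by the innermost binder, shifting
// every variable bound further out down by one, because that binder is
// eliminated. (Toy encoding: every binder binds exactly one variable.)
fn subst(ty: &Ty, param: &Ty, depth: usize) -> Ty {
    match ty {
        // The variable bound by the eliminated binder: replace it.
        Ty::Var(i) if *i == depth => param.clone(),
        // Bound outside the eliminated binder: shift it out by one.
        Ty::Var(i) if *i > depth => Ty::Var(*i - 1),
        // Bound by a binder inside the term being substituted: untouched.
        Ty::Var(i) => Ty::Var(*i),
        Ty::U32 => Ty::U32,
        Ty::Tuple(ts) => Ty::Tuple(ts.iter().map(|t| subst(t, param, depth)).collect()),
    }
}

fn main() {
    // Body of `for<A> { for<C> { (A, C) } }`, written relative to the inner
    // binder: C = Var(0), A = Var(1).
    let body = Ty::Tuple(vec![Ty::Var(1), Ty::Var(0)]);
    // Substitute `u32` for the inner binder; A becomes Var(0) under `for<A>`.
    let result = subst(&body, &Ty::U32, 0);
    assert_eq!(result, Ty::Tuple(vec![Ty::Var(0), Ty::U32]));
}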