author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit    698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree      173a775858bd501c378080a10dca74132f05bc50 /src/tools/rust-analyzer/crates/hir-ty/src/infer
parent    Initial commit. (diff)
Adding upstream version 1.64.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir-ty/src/infer')
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs |   82
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs  |  673
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs    | 1527
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs     |  354
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs    |  295
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs   |  738
6 files changed, 3669 insertions, 0 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
new file mode 100644
index 000000000..3ead92909
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -0,0 +1,82 @@
+//! Inference of closure parameter types based on the closure's expected type.
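+//!
+//! For example (illustrative only): when `&|x| x > 0` is passed where a
+//! `&dyn Fn(u32) -> bool` is expected, the expected `dyn Fn` type lets us
+//! deduce that the closure parameter `x` is `u32`.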
+
+use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, WhereClause};
+use hir_def::{expr::ExprId, HasModule};
+use smallvec::SmallVec;
+
+use crate::{
+ to_chalk_trait_id, utils, ChalkTraitId, DynTy, FnPointer, FnSig, Interner, Substitution, Ty,
+ TyExt, TyKind,
+};
+
+use super::{Expectation, InferenceContext};
+
+impl InferenceContext<'_> {
+ pub(super) fn deduce_closure_type_from_expectations(
+ &mut self,
+ closure_expr: ExprId,
+ closure_ty: &Ty,
+ sig_ty: &Ty,
+ expectation: &Expectation,
+ ) {
+ let expected_ty = match expectation.to_option(&mut self.table) {
+ Some(ty) => ty,
+ None => return,
+ };
+
+ // Deduction from where-clauses in scope, as well as fn-pointer coercion, is handled here.
+ let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty);
+
+ // Deduction based on the expected `dyn Fn` is done separately.
+ if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) {
+ if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) {
+ let expected_sig_ty = TyKind::Function(sig).intern(Interner);
+
+ self.unify(sig_ty, &expected_sig_ty);
+ }
+ }
+ }
+
+ fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
+ // Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
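+ // e.g. (illustrative) for an expected `dyn Fn(u32, String) -> bool`, the
+ // relevant bound is `<Self as FnOnce<(u32, String)>>::Output == bool`,
+ // from which we recover the signature `fn(u32, String) -> bool`.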
+
+ let fn_traits: SmallVec<[ChalkTraitId; 3]> =
+ utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate())
+ .map(to_chalk_trait_id)
+ .collect();
+
+ let self_ty = TyKind::Error.intern(Interner);
+ let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
+ for bound in bounds.iter(Interner) {
+ // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer`
+ if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
+ bound.skip_binders()
+ {
+ let assoc_data = self.db.associated_ty_data(projection.associated_ty_id);
+ if !fn_traits.contains(&assoc_data.trait_id) {
+ return None;
+ }
+
+ // Skip `Self`, get the type argument.
+ let arg = projection.substitution.as_slice(Interner).get(1)?;
+ if let Some(subst) = arg.ty(Interner)?.as_tuple() {
+ let generic_args = subst.as_slice(Interner);
+ let mut sig_tys = Vec::new();
+ for arg in generic_args {
+ sig_tys.push(arg.ty(Interner)?.clone());
+ }
+ sig_tys.push(ty.clone());
+
+ cov_mark::hit!(dyn_fn_param_informs_call_site_closure_signature);
+ return Some(FnPointer {
+ num_binders: bound.len(Interner),
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(Substitution::from_iter(Interner, sig_tys)),
+ });
+ }
+ }
+ }
+
+ None
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
new file mode 100644
index 000000000..f54440bf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -0,0 +1,673 @@
+//! Coercion logic. Coercions are certain type conversions that can implicitly
+//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
+//! like going from `&Vec<T>` to `&[T]`.
+//!
+//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
+//! `librustc_typeck/check/coercion.rs`.
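+//!
+//! A couple of illustrative examples:
+//! - `let s: &str = &String::from("a");` applies a deref coercion from
+//!   `&String` to `&str`;
+//! - `let p: *const u8 = &0u8;` weakens a shared reference to a raw pointer.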
+
+use std::{iter, sync::Arc};
+
+use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind};
+use hir_def::{expr::ExprId, lang_item::LangItemTarget};
+use stdx::always;
+use syntax::SmolStr;
+
+use crate::{
+ autoderef::{Autoderef, AutoderefKind},
+ db::HirDatabase,
+ infer::{
+ Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
+ TypeError, TypeMismatch,
+ },
+ static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner,
+ Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::unify::InferenceTable;
+
+pub(crate) type CoerceResult = Result<InferOk<(Vec<Adjustment>, Ty)>, TypeError>;
+
+/// Do not require any adjustments, i.e. coerce `x -> x`.
+fn identity(_: Ty) -> Vec<Adjustment> {
+ vec![]
+}
+
+fn simple(kind: Adjust) -> impl FnOnce(Ty) -> Vec<Adjustment> {
+ move |target| vec![Adjustment { kind, target }]
+}
+
+/// This always returns `Ok(...)`.
+fn success(
+ adj: Vec<Adjustment>,
+ target: Ty,
+ goals: Vec<InEnvironment<Goal<Interner>>>,
+) -> CoerceResult {
+ Ok(InferOk { goals, value: (adj, target) })
+}
+
+#[derive(Clone, Debug)]
+pub(super) struct CoerceMany {
+ expected_ty: Ty,
+}
+
+impl CoerceMany {
+ pub(super) fn new(expected: Ty) -> Self {
+ CoerceMany { expected_ty: expected }
+ }
+
+ /// Merge two types from different branches, with possible coercion.
+ ///
+ /// Mostly this means trying to coerce one to the other, but
+ /// - if we have two function types for different functions or closures, we need to
+ /// coerce both to function pointers;
+ /// - if we were concerned with lifetime subtyping, we'd need to look for a
+ /// least upper bound.
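+ ///
+ /// For example (illustrative): in `if c { foo } else { bar }` where `foo`
+ /// and `bar` are distinct `fn(i32) -> i32` items, neither `FnDef` type
+ /// coerces to the other, so both are reified to the common pointer type
+ /// `fn(i32) -> i32`.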
+ pub(super) fn coerce(
+ &mut self,
+ ctx: &mut InferenceContext<'_>,
+ expr: Option<ExprId>,
+ expr_ty: &Ty,
+ ) {
+ let expr_ty = ctx.resolve_ty_shallow(expr_ty);
+ self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty);
+
+ // Special case: two function types. Try to coerce both to
+ // pointers to have a chance at getting a match. See
+ // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
+ let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
+ (TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
+ // FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
+ // we should be coercing the closure to a fn pointer with the safety of the FnDef
+ cov_mark::hit!(coerce_fn_reification);
+ let sig =
+ self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
+ Some(sig)
+ }
+ _ => None,
+ };
+ if let Some(sig) = sig {
+ let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
+ let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
+ let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
+ if let (Ok(result1), Ok(result2)) = (result1, result2) {
+ ctx.table.register_infer_ok(result1);
+ ctx.table.register_infer_ok(result2);
+ return self.expected_ty = target_ty;
+ }
+ }
+
+ // It might not seem like it, but order is important here: If the expected
+ // type is a type variable and the new one is `!`, trying it the other
+ // way around first would mean we make the type variable `!`, instead of
+ // just marking it as possibly diverging.
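+ // E.g. (illustrative) in `match c { true => return, false => 1i32 }`,
+ // coercing the first arm's `!` into the result type variable merely marks
+ // it as diverging, so the second arm can still make the overall type `i32`.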
+ if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
+ /* self.expected_ty is already correct */
+ } else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
+ self.expected_ty = expr_ty;
+ } else {
+ if let Some(id) = expr {
+ ctx.result.type_mismatches.insert(
+ id.into(),
+ TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
+ );
+ }
+ cov_mark::hit!(coerce_merge_fail_fallback);
+ /* self.expected_ty is already correct */
+ }
+ }
+
+ pub(super) fn complete(self) -> Ty {
+ self.expected_ty
+ }
+}
+
+pub fn could_coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ coerce(db, env, tys).is_ok()
+}
+
+pub(crate) fn coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = table.fresh_subst(tys.binders.as_slice(Interner));
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ let (adjustments, ty) = table.coerce(&ty1_with_vars, &ty2_with_vars)?;
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+ };
+ // FIXME also map the types in the adjustments
+ Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
+}
+
+impl<'a> InferenceContext<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(super) fn coerce(
+ &mut self,
+ expr: Option<ExprId>,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<Ty, TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty)?;
+ if let Some(expr) = expr {
+ self.write_expr_adj(expr, adjustments);
+ }
+ Ok(ty)
+ }
+}
+
+impl<'a> InferenceTable<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(crate) fn coerce(
+ &mut self,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ match self.coerce_inner(from_ty, &to_ty) {
+ Ok(InferOk { value: (adjustments, ty), goals }) => {
+ self.register_infer_ok(InferOk { value: (), goals });
+ Ok((adjustments, ty))
+ }
+ Err(e) => {
+ // FIXME deal with error
+ Err(e)
+ }
+ }
+ }
+
+ fn coerce_inner(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
+ if from_ty.is_never() {
+ // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound
+ // type variable, we want `?T` to fallback to `!` if not
+ // otherwise constrained. An example where this arises:
+ //
+ // let _: Option<?T> = Some({ return; });
+ //
+ // here, we would coerce from `!` to `?T`.
+ if let TyKind::InferenceVar(tv, TyVariableKind::General) = to_ty.kind(Interner) {
+ self.set_diverging(*tv, true);
+ }
+ return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]);
+ }
+
+ // Consider coercing the subtype to a DST
+ if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
+ return Ok(ret);
+ }
+
+ // Examine the supertype and consider auto-borrowing.
+ match to_ty.kind(Interner) {
+ TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
+ TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt),
+ _ => {}
+ }
+
+ match from_ty.kind(Interner) {
+ TyKind::FnDef(..) => {
+ // Function items are coercible to any closure
+ // type; function pointers are not (that would
+ // require double indirection).
+ // Additionally, we permit coercion of function
+ // items to drop the unsafe qualifier.
+ self.coerce_from_fn_item(from_ty, to_ty)
+ }
+ TyKind::Function(from_fn_ptr) => {
+ // We permit coercion of fn pointers to drop the
+ // unsafe qualifier.
+ self.coerce_from_fn_pointer(from_ty.clone(), from_fn_ptr, to_ty)
+ }
+ TyKind::Closure(_, from_substs) => {
+ // Non-capturing closures are coercible to
+ // function pointers or unsafe function pointers.
+ // The conversion cannot handle closures that require unsafe.
+ self.coerce_closure_to_fn(from_ty.clone(), from_substs, to_ty)
+ }
+ _ => {
+ // Otherwise, just use unification rules.
+ self.unify_and(&from_ty, to_ty, identity)
+ }
+ }
+ }
+
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and<F>(&mut self, t1: &Ty, t2: &Ty, f: F) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ self.try_unify(t1, t2)
+ .and_then(|InferOk { goals, .. }| success(f(t1.clone()), t1.clone(), goals))
+ }
+
+ fn coerce_ptr(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
+ let (is_ref, from_mt, from_inner) = match from_ty.kind(Interner) {
+ TyKind::Ref(mt, _, ty) => (true, mt, ty),
+ TyKind::Raw(mt, ty) => (false, mt, ty),
+ _ => return self.unify_and(&from_ty, to_ty, identity),
+ };
+
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ // Check that the types which they point at are compatible.
+ let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner);
+
+ // Although references and unsafe ptrs have the same
+ // representation, we still register an Adjust::DerefRef so that
+ // regionck knows that the region for `a` must be valid here.
+ if is_ref {
+ self.unify_and(&from_raw, to_ty, |target| {
+ vec![
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), target },
+ ]
+ })
+ } else if *from_mt != to_mt {
+ self.unify_and(
+ &from_raw,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::MutToConstPointer)),
+ )
+ } else {
+ self.unify_and(&from_raw, to_ty, identity)
+ }
+ }
+
+ /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
+ /// To match `A` with `B`, autoderef will be performed,
+ /// calling `deref`/`deref_mut` where necessary.
+ fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
+ let from_mt = match from_ty.kind(Interner) {
+ &TyKind::Ref(mt, _, _) => {
+ coerce_mutabilities(mt, to_mt)?;
+ mt
+ }
+ _ => return self.unify_and(&from_ty, to_ty, identity),
+ };
+
+ // NOTE: this code is mostly copied and adapted from rustc, and
+ // currently more complicated than necessary, carrying errors around
+ // etc. This complication will become necessary when we actually track
+ // details of coercion errors though, so I think it's useful to leave
+ // the structure like it is.
+
+ let snapshot = self.snapshot();
+
+ let mut autoderef = Autoderef::new(self, from_ty.clone());
+ let mut first_error = None;
+ let mut found = None;
+
+ while let Some((referent_ty, autoderefs)) = autoderef.next() {
+ if autoderefs == 0 {
+ // Don't let this pass, otherwise it would cause
+ // &T to autoref to &&T.
+ continue;
+ }
+
+ // At this point, we have deref'd `a` to `referent_ty`. So
+ // imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
+ // In the autoderef loop for `&'a mut Vec<T>`, we would get
+ // three callbacks:
+ //
+ // - `&'a mut Vec<T>` -- 0 derefs, just ignore it
+ // - `Vec<T>` -- 1 deref
+ // - `[T]` -- 2 deref
+ //
+ // At each point after the first callback, we want to
+ // check to see whether this would match our target type
+ // (`&'b mut [T]`) if we autoref'd it. We can't just
+ // compare the referent types, though, because we still
+ // have to consider the mutability. E.g., in the case
+ // we've been considering, we have an `&mut` reference, so
+ // the `T` in `[T]` needs to be unified with equality.
+ //
+ // Therefore, we construct reference types reflecting what
+ // the types will be after we do the final auto-ref and
+ // compare those. Note that this means we use the target
+ // mutability [1], since it may be that we are coercing
+ // from `&mut T` to `&U`.
+ let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc
+ let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
+ match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
+ Ok(result) => {
+ found = Some(result.map(|()| derefd_from_ty));
+ break;
+ }
+ Err(err) => {
+ if first_error.is_none() {
+ first_error = Some(err);
+ }
+ }
+ }
+ }
+
+ // Extract type or return an error. We return the first error
+ // we got, which should be from relating the "base" type
+ // (e.g., in example above, the failure from relating `Vec<T>`
+ // to the target type), since that should be the least
+ // confusing.
+ let InferOk { value: ty, goals } = match found {
+ Some(d) => d,
+ None => {
+ self.rollback_to(snapshot);
+ let err = first_error.expect("coerce_borrowed_pointer had no error");
+ return Err(err);
+ }
+ };
+ if ty == from_ty && from_mt == Mutability::Not && autoderef.step_count() == 1 {
+ // As a special case, if we would produce `&'a *x`, that's
+ // a total no-op. We end up with the type `&'a T` just as
+ // we started with. In that case, just skip it
+ // altogether. This is just an optimization.
+ //
+ // Note that for `&mut`, we DO want to reborrow --
+ // otherwise, this would be a move, which might be an
+ // error. For example `foo(self.x)` where `self` and
+ // `self.x` both have `&mut` type would be a move of
+ // `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
+ // which is a borrow.
+ always!(to_mt == Mutability::Not); // can only coerce &T -> &U
+ return success(vec![], ty, goals);
+ }
+
+ let mut adjustments = auto_deref_adjust_steps(&autoderef);
+ adjustments
+ .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() });
+
+ success(adjustments, ty, goals)
+ }
+
+ /// Attempts to coerce from the type of a Rust function item into a function pointer.
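+ ///
+ /// For example (illustrative), `let f: fn(u8) -> u8 = double;` reifies the
+ /// zero-sized `FnDef` type of `double` into a function pointer via
+ /// `PointerCast::ReifyFnPointer`.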
+ fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
+ match to_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let from_sig = from_ty.callable_sig(self.db).expect("FnDef had no sig");
+
+ // FIXME check ABI: Intrinsics are not coercible to function pointers
+ // FIXME Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396)
+
+ // FIXME rustc normalizes assoc types in the sig here, not sure if necessary
+
+ let from_sig = from_sig.to_fn_ptr();
+ let from_fn_pointer = TyKind::Function(from_sig.clone()).intern(Interner);
+ let ok = self.coerce_from_safe_fn(
+ from_fn_pointer.clone(),
+ &from_sig,
+ to_ty,
+ |unsafe_ty| {
+ vec![
+ Adjustment {
+ kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
+ target: from_fn_pointer,
+ },
+ Adjustment {
+ kind: Adjust::Pointer(PointerCast::UnsafeFnPointer),
+ target: unsafe_ty,
+ },
+ ]
+ },
+ simple(Adjust::Pointer(PointerCast::ReifyFnPointer)),
+ )?;
+
+ Ok(ok)
+ }
+ _ => self.unify_and(&from_ty, to_ty, identity),
+ }
+ }
+
+ fn coerce_from_fn_pointer(
+ &mut self,
+ from_ty: Ty,
+ from_f: &FnPointer,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ self.coerce_from_safe_fn(
+ from_ty,
+ from_f,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)),
+ identity,
+ )
+ }
+
+ fn coerce_from_safe_fn<F, G>(
+ &mut self,
+ from_ty: Ty,
+ from_fn_ptr: &FnPointer,
+ to_ty: &Ty,
+ to_unsafe: F,
+ normal: G,
+ ) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ G: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) {
+ if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) =
+ (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety)
+ {
+ let from_unsafe =
+ TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner);
+ return self.unify_and(&from_unsafe, to_ty, to_unsafe);
+ }
+ }
+ self.unify_and(&from_ty, to_ty, normal)
+ }
+
+ /// Attempts to coerce from the type of a non-capturing closure into a
+ /// function pointer.
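+ ///
+ /// For example (illustrative), `let f: fn(i32) -> i32 = |x| x + 1;` applies
+ /// `PointerCast::ClosureFnPointer(Safety::Safe)` to the non-capturing
+ /// closure.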
+ fn coerce_closure_to_fn(
+ &mut self,
+ from_ty: Ty,
+ from_substs: &Substitution,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ match to_ty.kind(Interner) {
+ // if from_substs is non-capturing (FIXME)
+ TyKind::Function(fn_ty) => {
+ // We coerce the closure, which has fn type
+ // `extern "rust-call" fn((arg0,arg1,...)) -> _`
+ // to
+ // `fn(arg0,arg1,...) -> _`
+ // or
+ // `unsafe fn(arg0,arg1,...) -> _`
+ let safety = fn_ty.sig.safety;
+ let pointer_ty = coerce_closure_fn_ty(from_substs, safety);
+ self.unify_and(
+ &pointer_ty,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::ClosureFnPointer(safety))),
+ )
+ }
+ _ => self.unify_and(&from_ty, to_ty, identity),
+ }
+ }
+
+ /// Coerce a type using `from_ty: CoerceUnsized<to_ty>`
+ ///
+ /// See: <https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html>
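+ ///
+ /// Illustrative examples: `&[u8; 3]` unsizing to `&[u8]`, or `Box<i32>`
+ /// unsizing to `Box<dyn Display>`.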
+ fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> CoerceResult {
+ // These 'if' statements require some explanation.
+ // The `CoerceUnsized` trait is special - it is only
+ // possible to write `impl CoerceUnsized<B> for A` where
+ // A and B have 'matching' fields. This rules out the following
+ // two types of blanket impls:
+ //
+ // `impl<T> CoerceUnsized<T> for SomeType`
+ // `impl<T> CoerceUnsized<SomeType> for T`
+ //
+ // Both of these trigger a special `CoerceUnsized`-related error (E0376)
+ //
+ // We can take advantage of this fact to avoid performing unnecessary work.
+ // If either `source` or `target` is a type variable, then any applicable impl
+ // would need to be generic over the self-type (`impl<T> CoerceUnsized<SomeType> for T`)
+ // or generic over the `CoerceUnsized` type parameter (`impl<T> CoerceUnsized<T> for
+ // SomeType`).
+ //
+ // However, these are exactly the kinds of impls which are forbidden by
+ // the compiler! Therefore, we can be sure that coercion will always fail
+ // when either the source or target type is a type variable. This allows us
+ // to skip performing any trait selection, and immediately bail out.
+ if from_ty.is_ty_var() {
+ return Err(TypeError);
+ }
+ if to_ty.is_ty_var() {
+ return Err(TypeError);
+ }
+
+ // Handle reborrows before trying to solve `Source: CoerceUnsized<Target>`.
+ let reborrow = match (from_ty.kind(Interner), to_ty.kind(Interner)) {
+ (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ let lt = static_lifetime();
+ Some((
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)),
+ target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
+ },
+ ))
+ }
+ (TyKind::Ref(from_mt, _, from_inner), &TyKind::Raw(to_mt, _)) => {
+ coerce_mutabilities(*from_mt, to_mt)?;
+
+ Some((
+ Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)),
+ target: TyKind::Raw(to_mt, from_inner.clone()).intern(Interner),
+ },
+ ))
+ }
+ _ => None,
+ };
+ let coerce_from =
+ reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());
+
+ let krate = self.trait_env.krate;
+ let coerce_unsized_trait =
+ match self.db.lang_item(krate, SmolStr::new_inline("coerce_unsized")) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return Err(TypeError),
+ };
+
+ let coerce_unsized_tref = {
+ let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait);
+ if b.remaining() != 2 {
+ // The CoerceUnsized trait should have two generic params: Self and T.
+ return Err(TypeError);
+ }
+ b.push(coerce_from).push(to_ty.clone()).build()
+ };
+
+ let goal: InEnvironment<DomainGoal> =
+ InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner));
+
+ let canonicalized = self.canonicalize(goal);
+
+ // FIXME: rustc's coerce_unsized is more specialized -- it only tries to
+ // solve `CoerceUnsized` and `Unsize` goals at this point and leaves the
+ // rest for later. Also, there's some logic about sized type variables.
+ // Need to find out in what cases this is necessary
+ let solution = self
+ .db
+ .trait_solve(krate, canonicalized.value.clone().cast(Interner))
+ .ok_or(TypeError)?;
+
+ match solution {
+ Solution::Unique(v) => {
+ canonicalized.apply_solution(
+ self,
+ Canonical {
+ binders: v.binders,
+ // FIXME handle constraints
+ value: v.value.subst,
+ },
+ );
+ }
+ Solution::Ambig(Guidance::Definite(subst)) => {
+ // FIXME need to record an obligation here
+ canonicalized.apply_solution(self, subst)
+ }
+ // FIXME actually we maybe should also accept unknown guidance here
+ _ => return Err(TypeError),
+ };
+ let unsize =
+ Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: to_ty.clone() };
+ let adjustments = match reborrow {
+ None => vec![unsize],
+ Some((deref, autoref)) => vec![deref, autoref, unsize],
+ };
+ success(adjustments, to_ty.clone(), vec![])
+ }
+}
+
+fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
+ let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ match closure_sig.kind(Interner) {
+ TyKind::Function(fn_ty) => TyKind::Function(FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety, ..fn_ty.sig },
+ substitution: fn_ty.substitution.clone(),
+ })
+ .intern(Interner),
+ _ => TyKind::Error.intern(Interner),
+ }
+}
+
+fn safe_to_unsafe_fn_ty(fn_ty: FnPointer) -> FnPointer {
+ FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety: chalk_ir::Safety::Unsafe, ..fn_ty.sig },
+ substitution: fn_ty.substitution,
+ }
+}
+
+fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError> {
+ match (from, to) {
+ (Mutability::Mut, Mutability::Mut | Mutability::Not)
+ | (Mutability::Not, Mutability::Not) => Ok(()),
+ (Mutability::Not, Mutability::Mut) => Err(TypeError),
+ }
+}
+
+pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adjustment> {
+ let steps = autoderef.steps();
+ let targets =
+ steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty()));
+ steps
+ .iter()
+ .map(|(kind, _source)| match kind {
+ // We do not know yet what kind of deref we require at this point
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Builtin => None,
+ })
+ .zip(targets)
+ .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
+ .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
new file mode 100644
index 000000000..d164e64a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -0,0 +1,1527 @@
+//! Type inference for expressions.
+
+use std::{
+ collections::hash_map::Entry,
+ iter::{repeat, repeat_with},
+ mem,
+};
+
+use chalk_ir::{
+ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
+};
+use hir_def::{
+ expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
+ generics::TypeOrConstParamData,
+ path::{GenericArg, GenericArgs},
+ resolver::resolver_for_expr,
+ ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup,
+};
+use hir_expand::name::{name, Name};
+use stdx::always;
+use syntax::ast::RangeOp;
+
+use crate::{
+ autoderef::{self, Autoderef},
+ consteval,
+ infer::coerce::CoerceMany,
+ lower::{
+ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
+ },
+ mapping::{from_chalk, ToChalk},
+ method_resolution::{self, VisibleFromModule},
+ primitive::{self, UintTy},
+ static_lifetime, to_chalk_trait_id,
+ utils::{generics, Generics},
+ AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
+ Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::{
+ coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(tgt_expr, expected);
+ if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
+ let could_unify = self.unify(&ty, &expected_ty);
+ if !could_unify {
+ self.result.type_mismatches.insert(
+ tgt_expr.into(),
+ TypeMismatch { expected: expected_ty, actual: ty.clone() },
+ );
+ }
+ }
+ ty
+ }
+
+ /// Infer type of expression with possibly implicit coerce to the expected type.
+ /// Return the type after possible coercion.
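+ ///
+ /// For example (illustrative), inferring `&[1u8, 2]` against an expected
+ /// `&[u8]` first yields `&[u8; 2]`, which is then unsize-coerced to `&[u8]`.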
+ pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(expr, expected);
+ if let Some(target) = expected.only_has_type(&mut self.table) {
+ match self.coerce(Some(expr), &ty, &target) {
+ Ok(res) => res,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: target.clone(), actual: ty.clone() },
+ );
+ target
+ }
+ }
+ } else {
+ ty
+ }
+ }
+
+ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ self.db.unwind_if_cancelled();
+
+ let ty = match &self.body[tgt_expr] {
+ Expr::Missing => self.err_ty(),
+ &Expr::If { condition, then_branch, else_branch } => {
+ self.infer_expr(
+ condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+
+ let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut both_arms_diverge = Diverges::Always;
+
+ let result_ty = self.table.new_type_var();
+ let then_ty = self.infer_expr_inner(then_branch, expected);
+ both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut coerce = CoerceMany::new(result_ty);
+ coerce.coerce(self, Some(then_branch), &then_ty);
+ let else_ty = match else_branch {
+ Some(else_branch) => self.infer_expr_inner(else_branch, expected),
+ None => TyBuilder::unit(),
+ };
+ both_arms_diverge &= self.diverges;
+ // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
+ coerce.coerce(self, else_branch, &else_ty);
+
+ self.diverges = condition_diverges | both_arms_diverge;
+
+ coerce.complete()
+ }
+ &Expr::Let { pat, expr } => {
+ let input_ty = self.infer_expr(expr, &Expectation::none());
+ self.infer_pat(pat, &input_ty, BindingMode::default());
+ TyKind::Scalar(Scalar::Bool).intern(Interner)
+ }
+ Expr::Block { statements, tail, label, id: _ } => {
+ let old_resolver = mem::replace(
+ &mut self.resolver,
+ resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
+ );
+ let ty = match label {
+ Some(_) => {
+ let break_ty = self.table.new_type_var();
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(break_ty.clone()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let ty = self.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ &Expectation::has_type(break_ty),
+ );
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+ if ctxt.may_break {
+ ctxt.coerce.complete()
+ } else {
+ ty
+ }
+ }
+ None => self.infer_block(tgt_expr, statements, *tail, expected),
+ };
+ self.resolver = old_resolver;
+ ty
+ }
+ Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected),
+ Expr::TryBlock { body } => {
+ let _inner = self.infer_expr(*body, expected);
+ // FIXME should be std::result::Result<{inner}, _>
+ self.err_ty()
+ }
+ Expr::Async { body } => {
+ let ret_ty = self.table.new_type_var();
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ let inner_ty = self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ // Use the first type parameter as the output type of future.
+ // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
+ let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
+ .intern(Interner)
+ }
+ Expr::Loop { body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.table.new_type_var()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+
+ if ctxt.may_break {
+ self.diverges = Diverges::Maybe;
+ ctxt.coerce.complete()
+ } else {
+ TyKind::Never.intern(Interner)
+ }
+ }
+ Expr::While { condition, body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(
+ *condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::For { iterable, body, pat, label } => {
+ let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
+
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let pat_ty =
+ self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
+
+ self.infer_pat(*pat, &pat_ty, BindingMode::default());
+
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::Closure { body, args, ret_type, arg_types } => {
+ assert_eq!(args.len(), arg_types.len());
+
+ let mut sig_tys = Vec::new();
+
+ // collect explicitly written argument types
+ for arg_type in arg_types.iter() {
+ let arg_ty = match arg_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(arg_ty);
+ }
+
+ // add return type
+ let ret_ty = match ret_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(ret_ty.clone());
+ let sig_ty = TyKind::Function(FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(
+ Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
+ ),
+ })
+ .intern(Interner);
+ let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
+ let closure_ty =
+ TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
+ .intern(Interner);
+
+ // Eagerly try to relate the closure type with the expected
+ // type, otherwise we often won't have enough information to
+ // infer the body.
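+ // E.g. (illustrative) in `let f: fn(u32) -> u32 = |x| x / 2;`, relating
+ // the closure type with the expected fn-pointer type is what tells us
+ // that `x` is `u32` before we look at the body.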
+ self.deduce_closure_type_from_expectations(
+ tgt_expr,
+ &closure_ty,
+ &sig_ty,
+ expected,
+ );
+
+ // Now go through the argument patterns
+ for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
+ self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
+ }
+
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ closure_ty
+ }
+ Expr::Call { callee, args, .. } => {
+ let callee_ty = self.infer_expr(*callee, &Expectation::none());
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+ let mut res = None;
+ let mut derefed_callee = callee_ty.clone();
+ // manual loop to be able to access `derefs.table`
+ while let Some((callee_deref_ty, _)) = derefs.next() {
+ res = derefs.table.callable_sig(&callee_deref_ty, args.len());
+ if res.is_some() {
+ derefed_callee = callee_deref_ty;
+ break;
+ }
+ }
+ // if the function is unresolved, we use is_varargs=true to
+ // suppress the arg count diagnostic here
+ let is_varargs =
+ derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
+ || res.is_none();
+ let (param_tys, ret_ty) = match res {
+ Some(res) => {
+ let adjustments = auto_deref_adjust_steps(&derefs);
+ self.write_expr_adj(*callee, adjustments);
+ res
+ }
+ None => (Vec::new(), self.err_ty()), // FIXME diagnostic
+ };
+ let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
+ self.register_obligations_for_call(&callee_ty);
+
+ let expected_inputs = self.expected_inputs_for_expected_output(
+ expected,
+ ret_ty.clone(),
+ param_tys.clone(),
+ );
+
+ self.check_call_arguments(
+ tgt_expr,
+ args,
+ &expected_inputs,
+ &param_tys,
+ &indices_to_skip,
+ is_varargs,
+ );
+ self.normalize_associated_types_in(ret_ty)
+ }
+ Expr::MethodCall { receiver, args, method_name, generic_args } => self
+ .infer_method_call(
+ tgt_expr,
+ *receiver,
+ args,
+ method_name,
+ generic_args.as_deref(),
+ expected,
+ ),
+ Expr::Match { expr, arms } => {
+ let input_ty = self.infer_expr(*expr, &Expectation::none());
+
+ let expected = expected.adjust_for_branches(&mut self.table);
+
+ let result_ty = if arms.is_empty() {
+ TyKind::Never.intern(Interner)
+ } else {
+ match &expected {
+ Expectation::HasType(ty) => ty.clone(),
+ _ => self.table.new_type_var(),
+ }
+ };
+ let mut coerce = CoerceMany::new(result_ty);
+
+ let matchee_diverges = self.diverges;
+ let mut all_arms_diverge = Diverges::Always;
+
+ for arm in arms.iter() {
+ self.diverges = Diverges::Maybe;
+ let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
+ if let Some(guard_expr) = arm.guard {
+ self.infer_expr(
+ guard_expr,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ }
+
+ let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+ all_arms_diverge &= self.diverges;
+ coerce.coerce(self, Some(arm.expr), &arm_ty);
+ }
+
+ self.diverges = matchee_diverges | all_arms_diverge;
+
+ coerce.complete()
+ }
+ Expr::Path(p) => {
+ // FIXME this could be more efficient...
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
+ self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Expr::Continue { .. } => TyKind::Never.intern(Interner),
+ Expr::Break { expr, label } => {
+ let mut coerce = match find_breakable(&mut self.breakables, label.as_ref()) {
+ Some(ctxt) => {
+ // avoiding the borrowck
+ mem::replace(
+ &mut ctxt.coerce,
+ CoerceMany::new(self.result.standard_types.unknown.clone()),
+ )
+ }
+ None => CoerceMany::new(self.result.standard_types.unknown.clone()),
+ };
+
+ let val_ty = if let Some(expr) = *expr {
+ self.infer_expr(expr, &Expectation::none())
+ } else {
+ TyBuilder::unit()
+ };
+
+ // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
+ coerce.coerce(self, *expr, &val_ty);
+
+ if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
+ ctxt.coerce = coerce;
+ ctxt.may_break = true;
+ } else {
+ self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+ expr: tgt_expr,
+ });
+ };
+
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Return { expr } => {
+ if let Some(expr) = expr {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
+ } else {
+ let unit = TyBuilder::unit();
+ let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Yield { expr } => {
+ // FIXME: track yield type for coercion
+ if let Some(expr) = expr {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::RecordLit { path, fields, spread, .. } => {
+ let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
+ if let Some(variant) = def_id {
+ self.write_variant_resolution(tgt_expr.into(), variant);
+ }
+
+ if let Some(t) = expected.only_has_type(&mut self.table) {
+ self.unify(&ty, &t);
+ }
+
+ let substs = ty
+ .as_adt()
+ .map(|(_, s)| s.clone())
+ .unwrap_or_else(|| Substitution::empty(Interner));
+ let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
+ for field in fields.iter() {
+ let field_def =
+ variant_data.as_ref().and_then(|it| match it.field(&field.name) {
+ Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+ expr: field.expr,
+ });
+ None
+ }
+ });
+ let field_ty = field_def.map_or(self.err_ty(), |it| {
+ field_types[it.local_id].clone().substitute(Interner, &substs)
+ });
+ self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+ }
+ if let Some(expr) = spread {
+ self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
+ }
+ ty
+ }
+ Expr::Field { expr, name } => {
+ let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
+
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
+ let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
+ let (field_id, parameters) = match derefed_ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ return name.as_tuple_index().and_then(|idx| {
+ substs
+ .as_slice(Interner)
+ .get(idx)
+ .map(|a| a.assert_ty_ref(Interner))
+ .cloned()
+ });
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
+ let local_id = self.db.struct_data(*s).variant_data.field(name)?;
+ let field = FieldId { parent: (*s).into(), local_id };
+ (field, parameters.clone())
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
+ let local_id = self.db.union_data(*u).variant_data.field(name)?;
+ let field = FieldId { parent: (*u).into(), local_id };
+ (field, parameters.clone())
+ }
+ _ => return None,
+ };
+ let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+ .is_visible_from(self.db.upcast(), self.resolver.module());
+ if !is_visible {
+ // Write down the first field resolution even if it is not visible
+ // This aids IDE features for private fields like goto def and in
+ // case of autoderef finding an applicable field, this will be
+ // overwritten in a following cycle
+ if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr)
+ {
+ entry.insert(field_id);
+ }
+ return None;
+ }
+ // can't have `write_field_resolution` here because `self.table` is borrowed :(
+ self.result.field_resolutions.insert(tgt_expr, field_id);
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &parameters);
+ Some(ty)
+ });
+ let ty = match ty {
+ Some(ty) => {
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ self.write_expr_adj(*expr, adjustments);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ ty
+ }
+ _ => self.err_ty(),
+ };
+ ty
+ }
+ Expr::Await { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
+ }
+ Expr::Try { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
+ }
+ Expr::Cast { expr, type_ref } => {
+ // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
+ let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let cast_ty = self.make_ty(type_ref);
+ // FIXME check the cast...
+ cast_ty
+ }
+ Expr::Ref { expr, rawness, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .and_then(|t| t.as_reference_or_ptr())
+ {
+ if exp_mutability == Mutability::Mut && mutability == Mutability::Not {
+ // FIXME: record type error - expected mut reference but found shared ref,
+ // which cannot be coerced
+ }
+ if exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
+ // FIXME: record type error - expected reference but found ptr,
+ // which cannot be coerced
+ }
+ Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
+ } else {
+ Expectation::none()
+ };
+ let inner_ty = self.infer_expr_inner(*expr, &expectation);
+ match rawness {
+ Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
+ Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
+ }
+ .intern(Interner)
+ }
+ &Expr::Box { expr } => self.infer_expr_box(expr, expected),
+ Expr::UnaryOp { expr, op } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let inner_ty = self.resolve_ty_shallow(&inner_ty);
+ match op {
+ UnaryOp::Deref => {
+ autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
+ }
+ UnaryOp::Neg => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
+ | TyKind::InferenceVar(
+ _,
+ TyVariableKind::Integer | TyVariableKind::Float,
+ ) => inner_ty,
+ // Otherwise we resolve via the std::ops::Neg trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
+ }
+ }
+ UnaryOp::Not => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Bool | Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer) => inner_ty,
+ // Otherwise we resolve via the std::ops::Not trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
+ }
+ }
+ }
+ }
+ Expr::BinaryOp { lhs, rhs, op } => match op {
+ Some(BinaryOp::Assignment { op: None }) => {
+ let lhs = *lhs;
+ let is_ordinary = match &self.body[lhs] {
+ Expr::Array(_)
+ | Expr::RecordLit { .. }
+ | Expr::Tuple { .. }
+ | Expr::Underscore => false,
+ Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)),
+ _ => true,
+ };
+
+ // In ordinary (non-destructuring) assignments, the type of
+ // `lhs` must be inferred first so that the ADT field
+ // instantiations in the RHS can be coerced to it. Note that this
+ // cannot happen in destructuring assignments because of how
+ // they are desugared.
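+ // E.g. (illustrative) for `s = &string;` with `s: &str`, inferring `s`
+ // first lets the RHS `&String` be deref-coerced to `&str`.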
+ if is_ordinary {
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+ self.infer_expr_coerce(*rhs, &Expectation::has_type(lhs_ty));
+ } else {
+ let rhs_ty = self.infer_expr(*rhs, &Expectation::none());
+ self.infer_assignee_expr(lhs, &rhs_ty);
+ }
+ self.result.standard_types.unit.clone()
+ }
+ Some(BinaryOp::LogicOp(_)) => {
+ let bool_ty = self.result.standard_types.bool_.clone();
+ self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty.clone()));
+ let lhs_diverges = self.diverges;
+ self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty.clone()));
+ // Depending on the LHS' value, the RHS can never execute.
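+ // E.g. (illustrative) in `cond && return x`, the RHS diverges, but the
+ // whole expression may still evaluate to `false`, so only the LHS'
+ // divergence is kept.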
+ self.diverges = lhs_diverges;
+ bool_ty
+ }
+ Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
+ _ => self.err_ty(),
+ },
+ Expr::Range { lhs, rhs, range_type } => {
+ let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
+ let rhs_expect = lhs_ty
+ .as_ref()
+ .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
+ let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
+ match (range_type, lhs_ty, rhs_ty) {
+ (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
+ Some(adt) => TyBuilder::adt(self.db, adt).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, None, Some(ty)) => {
+ match self.resolve_range_to_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, Some(_), Some(ty)) => {
+ match self.resolve_range_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, _, None) => self.err_ty(),
+ }
+ }
+ Expr::Index { base, index } => {
+ let base_ty = self.infer_expr_inner(*base, &Expectation::none());
+ let index_ty = self.infer_expr(*index, &Expectation::none());
+
+ if let Some(index_trait) = self.resolve_ops_index() {
+ let canonicalized = self.canonicalize(base_ty.clone());
+ let receiver_adjustments = method_resolution::resolve_indexing_op(
+ self.db,
+ self.trait_env.clone(),
+ canonicalized.value,
+ index_trait,
+ );
+ let (self_ty, adj) = receiver_adjustments
+ .map_or((self.err_ty(), Vec::new()), |adj| {
+ adj.apply(&mut self.table, base_ty)
+ });
+ self.write_expr_adj(*base, adj);
+ self.resolve_associated_type_with_params(
+ self_ty,
+ self.resolve_ops_index_output(),
+ &[GenericArgData::Ty(index_ty).intern(Interner)],
+ )
+ } else {
+ self.err_ty()
+ }
+ }
+ Expr::Tuple { exprs, .. } => {
+ let mut tys = match expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .map(|t| t.kind(Interner))
+ {
+ Some(TyKind::Tuple(_, substs)) => substs
+ .iter(Interner)
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()))
+ .take(exprs.len())
+ .collect::<Vec<_>>(),
+ _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
+ };
+
+ for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
+ }
+
+ TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
+ }
+ Expr::Array(array) => {
+ let elem_ty =
+ match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
+ Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
+ _ => self.table.new_type_var(),
+ };
+ let mut coerce = CoerceMany::new(elem_ty.clone());
+
+ let expected = Expectation::has_type(elem_ty.clone());
+ let len = match array {
+ Array::ElementList { elements, .. } => {
+ for &expr in elements.iter() {
+ let cur_elem_ty = self.infer_expr_inner(expr, &expected);
+ coerce.coerce(self, Some(expr), &cur_elem_ty);
+ }
+ consteval::usize_const(Some(elements.len() as u128))
+ }
+ &Array::Repeat { initializer, repeat } => {
+ self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
+ self.infer_expr(
+ repeat,
+ &Expectation::has_type(
+ TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
+ ),
+ );
+
+ if let Some(g_def) = self.owner.as_generic_def_id() {
+ let generics = generics(self.db.upcast(), g_def);
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ || generics,
+ DebruijnIndex::INNERMOST,
+ )
+ } else {
+ consteval::usize_const(None)
+ }
+ }
+ };
+
+ TyKind::Array(coerce.complete(), len).intern(Interner)
+ }
+ Expr::Literal(lit) => match lit {
+ Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ Literal::String(..) => {
+ TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
+ .intern(Interner)
+ }
+ Literal::ByteString(bs) => {
+ let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
+
+ let len = consteval::usize_const(Some(bs.len() as u128));
+
+ let array_type = TyKind::Array(byte_type, len).intern(Interner);
+ TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
+ }
+ Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
+ Literal::Int(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Uint(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Float(_v, ty) => match ty {
+ Some(float_ty) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_float_var(),
+ },
+ },
+ Expr::MacroStmts { tail, statements } => {
+ self.infer_block(tgt_expr, statements, *tail, expected)
+ }
+ Expr::Underscore => {
+ // Underscore expressions may only appear in assignee expressions,
+ // which are handled by `infer_assignee_expr()`, so any underscore
+ // expression reaching this branch is an error.
+ self.err_ty()
+ }
+ };
+ // use a new type variable if we got unknown here
+ let ty = self.insert_type_vars_shallow(ty);
+ self.write_expr_ty(tgt_expr, ty.clone());
+ if self.resolve_ty_shallow(&ty).is_never() {
+ // Any expression that produces a value of type `!` must have diverged
+ self.diverges = Diverges::Always;
+ }
+ ty
+ }
+
+ fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
+ if let Some(box_id) = self.resolve_boxed_box() {
+ let table = &mut self.table;
+ let inner_exp = expected
+ .to_option(table)
+ .as_ref()
+ .map(|e| e.as_adt())
+ .flatten()
+ .filter(|(e_adt, _)| e_adt == &box_id)
+ .map(|(_, subts)| {
+ let g = subts.at(Interner, 0);
+ Expectation::rvalue_hint(table, Ty::clone(g.assert_ty_ref(Interner)))
+ })
+ .unwrap_or_else(Expectation::none);
+
+ let inner_ty = self.infer_expr_inner(inner_expr, &inner_exp);
+ TyBuilder::adt(self.db, box_id)
+ .push(inner_ty)
+ .fill_with_defaults(self.db, || self.table.new_type_var())
+ .build()
+ } else {
+ self.err_ty()
+ }
+ }
+
+ pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
+ let is_rest_expr = |expr| {
+ matches!(
+ &self.body[expr],
+ Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive },
+ )
+ };
+
+ let rhs_ty = self.resolve_ty_shallow(rhs_ty);
+
+ let ty = match &self.body[lhs] {
+ Expr::Tuple { exprs, .. } => {
+ // We don't consider multiple ellipses. This is analogous to
+ // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+ let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
+ let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+ self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
+ }
+ Expr::Call { callee, args, .. } => {
+ // Tuple structs
+ let path = match &self.body[*callee] {
+ Expr::Path(path) => Some(path),
+ _ => None,
+ };
+
+ // We don't consider multiple ellipses. This is analogous to
+ // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+ let ellipsis = args.iter().position(|e| is_rest_expr(*e));
+ let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+ self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
+ }
+ Expr::Array(Array::ElementList { elements, .. }) => {
+ let elem_ty = match rhs_ty.kind(Interner) {
+ TyKind::Array(st, _) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ // There's no need to handle `..` as it cannot be bound.
+ let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e));
+
+ for e in sub_exprs {
+ self.infer_assignee_expr(*e, &elem_ty);
+ }
+
+ match rhs_ty.kind(Interner) {
+ TyKind::Array(_, _) => rhs_ty.clone(),
+ // Even when `rhs_ty` is not an array type, this assignee
+ // expression is inferred to be an array (of unknown element
+ // type and length). This should not be just an error type,
+ // because at the end of this function we check whether this type
+ // unifies with `rhs_ty` in order to issue type mismatches.
+ _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
+ .intern(Interner),
+ }
+ }
+ Expr::RecordLit { path, fields, .. } => {
+ let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
+
+ self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+ }
+ Expr::Underscore => rhs_ty.clone(),
+ _ => {
+ // `lhs` is a place expression, a unit struct, or an enum variant.
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+
+ // This is the only branch where this function may coerce any type.
+ // We are returning early to avoid the unifiability check below.
+ let lhs_ty = self.insert_type_vars_shallow(lhs_ty);
+ let ty = match self.coerce(None, &rhs_ty, &lhs_ty) {
+ Ok(ty) => ty,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ lhs.into(),
+ TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() },
+ );
+ // `rhs_ty` is returned so no further type mismatches are
+ // reported because of this mismatch.
+ rhs_ty
+ }
+ };
+ self.write_expr_ty(lhs, ty.clone());
+ return ty;
+ }
+ };
+
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, &rhs_ty) {
+ self.result
+ .type_mismatches
+ .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() });
+ }
+ self.write_expr_ty(lhs, ty.clone());
+ ty
+ }
+
+ fn infer_overloadable_binop(
+ &mut self,
+ lhs: ExprId,
+ op: BinaryOp,
+ rhs: ExprId,
+ tgt_expr: ExprId,
+ ) -> Ty {
+ let lhs_expectation = Expectation::none();
+ let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
+ let rhs_ty = self.table.new_type_var();
+
+ let func = self.resolve_binop_method(op);
+ let func = match func {
+ Some(func) => func,
+ None => {
+ let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
+ let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
+ return self
+ .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
+ .unwrap_or_else(|| self.err_ty());
+ }
+ };
+
+ let subst = TyBuilder::subst_for_def(self.db, func)
+ .push(lhs_ty.clone())
+ .push(rhs_ty.clone())
+ .build();
+ self.write_method_resolution(tgt_expr, func, subst.clone());
+
+ let method_ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
+ self.register_obligations_for_call(&method_ty);
+
+ self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()));
+
+ let ret_ty = match method_ty.callable_sig(self.db) {
+ Some(sig) => sig.ret().clone(),
+ None => self.err_ty(),
+ };
+
+ let ret_ty = self.normalize_associated_types_in(ret_ty);
+
+ // FIXME: record autoref adjustments
+
+ // use knowledge of built-in binary ops, which can sometimes help inference
+ if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
+ self.unify(&builtin_rhs, &rhs_ty);
+ }
+ if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
+ self.unify(&builtin_ret, &ret_ty);
+ }
+
+ ret_ty
+ }
+
+ fn infer_block(
+ &mut self,
+ expr: ExprId,
+ statements: &[Statement],
+ tail: Option<ExprId>,
+ expected: &Expectation,
+ ) -> Ty {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, type_ref, initializer, else_branch } => {
+ let decl_ty = type_ref
+ .as_ref()
+ .map(|tr| self.make_ty(tr))
+ .unwrap_or_else(|| self.err_ty());
+
+ // Always use the declared type when specified
+ let mut ty = decl_ty.clone();
+
+ if let Some(expr) = initializer {
+ let actual_ty =
+ self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
+ if decl_ty.is_unknown() {
+ ty = actual_ty;
+ }
+ }
+
+ if let Some(expr) = else_branch {
+ self.infer_expr_coerce(
+ *expr,
+ &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
+ );
+ }
+
+ self.infer_pat(*pat, &ty, BindingMode::default());
+ }
+ Statement::Expr { expr, .. } => {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ }
+ }
+
+ if let Some(expr) = tail {
+ self.infer_expr_coerce(expr, expected)
+ } else {
+ // Citing rustc: if there is no explicit tail expression,
+ // that is typically equivalent to a tail expression
+ // of `()` -- except if the block diverges. In that
+ // case, there is no value supplied from the tail
+ // expression (assuming there are no other breaks,
+ // this implies that the type of the block will be
+ // `!`).
+ if self.diverges.is_always() {
+ // we don't even make an attempt at coercion
+ self.table.new_maybe_never_var()
+ } else if let Some(t) = expected.only_has_type(&mut self.table) {
+ if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+ );
+ }
+ t
+ } else {
+ TyBuilder::unit()
+ }
+ }
+ }
+
+ fn infer_method_call(
+ &mut self,
+ tgt_expr: ExprId,
+ receiver: ExprId,
+ args: &[ExprId],
+ method_name: &Name,
+ generic_args: Option<&GenericArgs>,
+ expected: &Expectation,
+ ) -> Ty {
+ let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ let resolved = method_resolution::lookup_method(
+ &canonicalized_receiver.value,
+ self.db,
+ self.trait_env.clone(),
+ &traits_in_scope,
+ VisibleFromModule::Filter(self.resolver.module()),
+ method_name,
+ );
+ let (receiver_ty, method_ty, substs) = match resolved {
+ Some((adjust, func)) => {
+ let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+ let generics = generics(self.db.upcast(), func.into());
+ let substs = self.substs_for_method_call(generics, generic_args);
+ self.write_expr_adj(receiver, adjustments);
+ self.write_method_resolution(tgt_expr, func, substs.clone());
+ (ty, self.db.value_ty(func.into()), substs)
+ }
+ None => (
+ receiver_ty,
+ Binders::empty(Interner, self.err_ty()),
+ Substitution::empty(Interner),
+ ),
+ };
+ let method_ty = method_ty.substitute(Interner, &substs);
+ self.register_obligations_for_call(&method_ty);
+ let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+ match method_ty.callable_sig(self.db) {
+ Some(sig) => {
+ if !sig.params().is_empty() {
+ (
+ sig.params()[0].clone(),
+ sig.params()[1..].to_vec(),
+ sig.ret().clone(),
+ sig.is_varargs,
+ )
+ } else {
+ (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
+ }
+ }
+ None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+ };
+ self.unify(&formal_receiver_ty, &receiver_ty);
+
+ let expected_inputs =
+ self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());
+
+ self.check_call_arguments(tgt_expr, args, &expected_inputs, &param_tys, &[], is_varargs);
+ self.normalize_associated_types_in(ret_ty)
+ }
+
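+ /// Given the expected *output* type of a call, speculatively unifies it with
+ /// the callable's return type (inside `fudge_inference`, so nothing is
+ /// committed) and, on success, returns the input types this implies.
+ /// E.g. for `fn identity<T>(t: T) -> T`, in `let _: &[i32] = identity(x)`
+ /// the expected input type becomes `&[i32]`.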
+ fn expected_inputs_for_expected_output(
+ &mut self,
+ expected_output: &Expectation,
+ output: Ty,
+ inputs: Vec<Ty>,
+ ) -> Vec<Ty> {
+ if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
+ self.table.fudge_inference(|table| {
+ if table.try_unify(&expected_ty, &output).is_ok() {
+ table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
+ chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
+ chalk_ir::VariableKind::Lifetime => {
+ var.to_lifetime(Interner).cast(Interner)
+ }
+ chalk_ir::VariableKind::Const(ty) => {
+ var.to_const(Interner, ty).cast(Interner)
+ }
+ })
+ } else {
+ Vec::new()
+ }
+ })
+ } else {
+ Vec::new()
+ }
+ }
+
+ fn check_call_arguments(
+ &mut self,
+ expr: ExprId,
+ args: &[ExprId],
+ expected_inputs: &[Ty],
+ param_tys: &[Ty],
+ skip_indices: &[u32],
+ is_varargs: bool,
+ ) {
+ if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
+ self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
+ call_expr: expr,
+ expected: param_tys.len() + skip_indices.len(),
+ found: args.len(),
+ });
+ }
+
+ // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+ // We do this in a pretty awful way: first we type-check any arguments
+ // that are not closures, then we type-check the closures. This is so
+ // that we have more information about the types of arguments when we
+ // type-check the functions. This isn't really the right way to do this.
+ for &check_closures in &[false, true] {
+ let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
+ let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
+ let expected_iter = expected_inputs
+ .iter()
+ .cloned()
+ .chain(param_iter.clone().skip(expected_inputs.len()));
+ for (idx, ((&arg, param_ty), expected_ty)) in
+ args.iter().zip(param_iter).zip(expected_iter).enumerate()
+ {
+ let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
+ if is_closure != check_closures {
+ continue;
+ }
+
+ while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
+ skip_indices.next();
+ }
+ if skip_indices.peek().copied() == Some(idx as u32) {
+ continue;
+ }
+
+ // the difference between param_ty and expected here is that
+ // expected is the parameter when the expected *return* type is
+ // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
+ // the expected type is already `&[i32]`, whereas param_ty is
+ // still an unbound type variable. We don't always want to force
+ // the parameter to coerce to the expected type (for example in
+ // `coerce_unsize_expected_type_4`).
+ let param_ty = self.normalize_associated_types_in(param_ty);
+ let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
+ // infer with the expected type we have...
+ let ty = self.infer_expr_inner(arg, &expected);
+
+ // then coerce to either the expected type or just the formal parameter type
+ let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
+ // if we are coercing to the expectation, unify with the
+ // formal parameter type to connect everything
+ self.unify(&ty, &param_ty);
+ ty
+ } else {
+ param_ty
+ };
+ if !coercion_target.is_unknown() {
+ if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
+ self.result.type_mismatches.insert(
+ arg.into(),
+ TypeMismatch { expected: coercion_target, actual: ty.clone() },
+ );
+ }
+ }
+ }
+ }
+ }
+
+ fn substs_for_method_call(
+ &mut self,
+ def_generics: Generics,
+ generic_args: Option<&GenericArgs>,
+ ) -> Substitution {
+ let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+ def_generics.provenance_split();
+ assert_eq!(self_params, 0); // method shouldn't have another Self param
+ let total_len = parent_params + type_params + const_params + impl_trait_params;
+ let mut substs = Vec::with_capacity(total_len);
+ // Parent arguments are unknown
+ for (id, param) in def_generics.iter_parent() {
+ match param {
+ TypeOrConstParamData::TypeParamData(_) => {
+ substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner));
+ }
+ TypeOrConstParamData::ConstParamData(_) => {
+ let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id));
+ substs
+ .push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner));
+ }
+ }
+ }
+ // handle provided arguments
+ if let Some(generic_args) = generic_args {
+ // if generic args are provided, they should cover all of the generic params, but we can't rely on that
+ for (arg, kind_id) in generic_args
+ .args
+ .iter()
+ .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+ .take(type_params + const_params)
+ .zip(def_generics.iter_id().skip(parent_params))
+ {
+ if let Some(g) = generic_arg_to_chalk(
+ self.db,
+ kind_id,
+ arg,
+ self,
+ |this, type_ref| this.make_ty(type_ref),
+ |this, c, ty| {
+ const_or_path_to_chalk(
+ this.db,
+ &this.resolver,
+ ty,
+ c,
+ ParamLoweringMode::Placeholder,
+ || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
+ DebruijnIndex::INNERMOST,
+ )
+ },
+ ) {
+ substs.push(g);
+ }
+ }
+ }
+ for (id, data) in def_generics.iter().skip(substs.len()) {
+ match data {
+ TypeOrConstParamData::TypeParamData(_) => {
+ substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner))
+ }
+ TypeOrConstParamData::ConstParamData(_) => {
+ substs.push(
+ GenericArgData::Const(self.table.new_const_var(
+ self.db.const_param_ty(ConstParamId::from_unchecked(id)),
+ ))
+ .intern(Interner),
+ )
+ }
+ }
+ }
+ assert_eq!(substs.len(), total_len);
+ Substitution::from_iter(Interner, substs)
+ }
+
+ fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
+ let callable_ty = self.resolve_ty_shallow(callable_ty);
+ if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
+ let def: CallableDefId = from_chalk(self.db, *fn_def);
+ let generic_predicates = self.db.generic_predicates(def.into());
+ for predicate in generic_predicates.iter() {
+ let (predicate, binders) = predicate
+ .clone()
+ .substitute(Interner, parameters)
+ .into_value_and_skipped_binders();
+ always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
+ self.push_obligation(predicate.cast(Interner));
+ }
+ // add obligation for trait implementation, if this is a trait method
+ match def {
+ CallableDefId::FunctionId(f) => {
+ if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
+ // construct a TraitRef
+ let substs = crate::subst_prefix(
+ &*parameters,
+ generics(self.db.upcast(), trait_.into()).len(),
+ );
+ self.push_obligation(
+ TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
+ .cast(Interner),
+ );
+ }
+ }
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
+ }
+ }
+ }
+
+ /// Returns the argument indices to skip.
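+ ///
+ /// "Legacy const generics" (the `#[rustc_legacy_const_generics(..)]`
+ /// attribute used by some `core::arch` intrinsics) let callers pass const
+ /// generics positionally as value arguments. A hedged sketch of the shape
+ /// being handled:
+ ///
+ /// ```ignore
+ /// #[rustc_legacy_const_generics(1)]
+ /// fn imm_op<const IMM: i32>(a: i32, b: i32) -> i32 { /* ... */ }
+ /// // called as `imm_op(a, 2, b)`: argument index 1 supplies `IMM` and is
+ /// // skipped during regular argument checking
+ /// ```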
+ fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Box<[u32]> {
+ let (func, subst) = match callee.kind(Interner) {
+ TyKind::FnDef(fn_id, subst) => {
+ let callable = CallableDefId::from_chalk(self.db, *fn_id);
+ let func = match callable {
+ CallableDefId::FunctionId(f) => f,
+ _ => return Default::default(),
+ };
+ (func, subst)
+ }
+ _ => return Default::default(),
+ };
+
+ let data = self.db.function_data(func);
+ if data.legacy_const_generics_indices.is_empty() {
+ return Default::default();
+ }
+
+ // only use legacy const generics if the argument count matches them
+ if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
+ if args.len() <= data.params.len() {
+ return Default::default();
+ } else {
+ // there are more arguments than there would be without legacy
+ // const params; use them
+ let mut indices = data.legacy_const_generics_indices.clone();
+ indices.sort();
+ return indices;
+ }
+ }
+
+ // check legacy const parameters
+ for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
+ let arg = match subst.at(Interner, subst_idx).constant(Interner) {
+ Some(c) => c,
+ None => continue, // not a const parameter?
+ };
+ if arg_idx >= args.len() as u32 {
+ continue;
+ }
+ let _ty = arg.data(Interner).ty.clone();
+ let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
+ self.infer_expr(args[arg_idx as usize], &expected);
+ // FIXME: evaluate and unify with the const
+ }
+ let mut indices = data.legacy_const_generics_indices.clone();
+ indices.sort();
+ indices
+ }
+
+ fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
+ let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
+ let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
+ match op {
+ BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
+ Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
+ }
+ BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
+ // all integer combinations are valid here
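+ // e.g. `1u8 << 30u32` type-checks, and the result keeps the
+ // left-hand type (`u8` here)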
+ if matches!(
+ lhs_ty.kind(Interner),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer)
+ ) && matches!(
+ rhs_ty.kind(Interner),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer)
+ ) {
+ Some(lhs_ty)
+ } else {
+ None
+ }
+ }
+ BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
+ // (int, int) | (uint, uint) | (float, float)
+ (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
+ | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
+ | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_))) => {
+ Some(rhs_ty)
+ }
+ // ({int}, int) | ({int}, uint)
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+ ) => Some(rhs_ty),
+ // (int, {int}) | (uint, {int})
+ (
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ ) => Some(lhs_ty),
+ // ({float}, float)
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ TyKind::Scalar(Scalar::Float(_)),
+ ) => Some(rhs_ty),
+ // (float, {float})
+ (
+ TyKind::Scalar(Scalar::Float(_)),
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ ) => Some(lhs_ty),
+ // ({int}, {int}) | ({float}, {float})
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ )
+ | (
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ ) => Some(rhs_ty),
+ _ => None,
+ },
+ }
+ }
+
+ fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
+ Some(match op {
+ BinaryOp::LogicOp(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ BinaryOp::Assignment { op: None } => lhs_ty,
+ BinaryOp::CmpOp(CmpOp::Eq { .. }) => match self
+ .resolve_ty_shallow(&lhs_ty)
+ .kind(Interner)
+ {
+ TyKind::Scalar(_) | TyKind::Str => lhs_ty,
+ TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
+ _ => return None,
+ },
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => return None,
+ BinaryOp::CmpOp(CmpOp::Ord { .. })
+ | BinaryOp::Assignment { op: Some(_) }
+ | BinaryOp::ArithOp(_) => match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) => lhs_ty,
+ TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
+ _ => return None,
+ },
+ })
+ }
+
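+ /// Resolves the `core::ops`/`core::cmp` trait method that an overloadable
+ /// binary operator desugars to, e.g. `a + b` resolves to `Add::add` via the
+ /// `add` lang item.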
+ fn resolve_binop_method(&self, op: BinaryOp) -> Option<FunctionId> {
+ let (name, lang_item) = match op {
+ BinaryOp::LogicOp(_) => return None,
+ BinaryOp::ArithOp(aop) => match aop {
+ ArithOp::Add => (name!(add), name!(add)),
+ ArithOp::Mul => (name!(mul), name!(mul)),
+ ArithOp::Sub => (name!(sub), name!(sub)),
+ ArithOp::Div => (name!(div), name!(div)),
+ ArithOp::Rem => (name!(rem), name!(rem)),
+ ArithOp::Shl => (name!(shl), name!(shl)),
+ ArithOp::Shr => (name!(shr), name!(shr)),
+ ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
+ ArithOp::BitOr => (name!(bitor), name!(bitor)),
+ ArithOp::BitAnd => (name!(bitand), name!(bitand)),
+ },
+ BinaryOp::Assignment { op: Some(aop) } => match aop {
+ ArithOp::Add => (name!(add_assign), name!(add_assign)),
+ ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
+ ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
+ ArithOp::Div => (name!(div_assign), name!(div_assign)),
+ ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
+ ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
+ ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
+ ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
+ ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
+ ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
+ },
+ BinaryOp::CmpOp(cop) => match cop {
+ CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
+ CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
+ (name!(le), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
+ (name!(lt), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
+ (name!(ge), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
+ (name!(gt), name!(partial_ord))
+ }
+ },
+ BinaryOp::Assignment { op: None } => return None,
+ };
+
+ let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?;
+
+ self.db.trait_data(trait_).method_by_name(&name)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
new file mode 100644
index 000000000..5e7320a5d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -0,0 +1,354 @@
+//! Type inference for patterns.
+
+use std::iter::repeat_with;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
+ path::Path,
+ type_ref::ConstScalar,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ consteval::intern_const_scalar,
+ infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
+ lower::lower_to_chalk_mutability,
+ static_lifetime, ConcreteConst, ConstValue, Interner, Substitution, Ty, TyBuilder, TyExt,
+ TyKind,
+};
+
+use super::PatLike;
+
+impl<'a> InferenceContext<'a> {
+ /// Infers type for tuple struct pattern or its corresponding assignee expression.
+ ///
+ /// Ellipses found in the original pattern or expression must be filtered out.
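+ ///
+ /// E.g. for `S(a, .., b)`, `subs` is `[a, b]` and `ellipsis` is `Some(1)`.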
+ pub(super) fn infer_tuple_struct_pat_like<T: PatLike>(
+ &mut self,
+ path: Option<&Path>,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ id: T,
+ ellipsis: Option<usize>,
+ subs: &[T],
+ ) -> Ty {
+ let (ty, def) = self.resolve_variant(path, true);
+ let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+ if let Some(variant) = def {
+ self.write_variant_resolution(id.into(), variant);
+ }
+ self.unify(&ty, expected);
+
+ let substs =
+ ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+ let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let (pre, post) = match ellipsis {
+ Some(idx) => subs.split_at(idx),
+ None => (subs, &[][..]),
+ };
+ let post_idx_offset = field_tys.iter().count().saturating_sub(post.len());
+
+ let pre_iter = pre.iter().enumerate();
+ let post_iter = (post_idx_offset..).zip(post.iter());
+ for (i, &subpat) in pre_iter.chain(post_iter) {
+ let expected_ty = var_data
+ .as_ref()
+ .and_then(|d| d.field(&Name::new_tuple_field(i)))
+ .map_or(self.err_ty(), |field| {
+ field_tys[field].clone().substitute(Interner, &substs)
+ });
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
+ T::infer(self, subpat, &expected_ty, default_bm);
+ }
+
+ ty
+ }
+
+ /// Infers type for record pattern or its corresponding assignee expression.
+ pub(super) fn infer_record_pat_like<T: PatLike>(
+ &mut self,
+ path: Option<&Path>,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ id: T,
+ subs: impl Iterator<Item = (Name, T)>,
+ ) -> Ty {
+ let (ty, def) = self.resolve_variant(path, false);
+ if let Some(variant) = def {
+ self.write_variant_resolution(id.into(), variant);
+ }
+
+ self.unify(&ty, expected);
+
+ let substs =
+ ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+ let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+
+ for (name, inner) in subs {
+ let expected_ty = var_data
+ .as_ref()
+ .and_then(|it| it.field(&name))
+ .map_or(self.err_ty(), |f| field_tys[f].clone().substitute(Interner, &substs));
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
+
+ T::infer(self, inner, &expected_ty, default_bm);
+ }
+
+ ty
+ }
+
+ /// Infers type for tuple pattern or its corresponding assignee expression.
+ ///
+ /// Ellipses found in the original pattern or expression must be filtered out.
+ pub(super) fn infer_tuple_pat_like<T: PatLike>(
+ &mut self,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ ellipsis: Option<usize>,
+ subs: &[T],
+ ) -> Ty {
+ let expectations = match expected.as_tuple() {
+ Some(parameters) => &*parameters.as_slice(Interner),
+ _ => &[],
+ };
+
+ let ((pre, post), n_uncovered_patterns) = match ellipsis {
+ Some(idx) => (subs.split_at(idx), expectations.len().saturating_sub(subs.len())),
+ None => ((&subs[..], &[][..]), 0),
+ };
+ let mut expectations_iter = expectations
+ .iter()
+ .cloned()
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()));
+
+ let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len());
+
+ inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns + subs.len()));
+
+ // Process pre
+ for (ty, pat) in inner_tys.iter_mut().zip(pre) {
+ *ty = T::infer(self, *pat, ty, default_bm);
+ }
+
+ // Process post
+ for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
+ *ty = T::infer(self, *pat, ty, default_bm);
+ }
+
+ TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+ .intern(Interner)
+ }
+
+ pub(super) fn infer_pat(
+ &mut self,
+ pat: PatId,
+ expected: &Ty,
+ mut default_bm: BindingMode,
+ ) -> Ty {
+ let mut expected = self.resolve_ty_shallow(expected);
+
+ if is_non_ref_pat(&self.body, pat) {
+ let mut pat_adjustments = Vec::new();
+ while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
+ pat_adjustments.push(expected.clone());
+ expected = self.resolve_ty_shallow(inner);
+ default_bm = match default_bm {
+ BindingMode::Move => BindingMode::Ref(mutability),
+ BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not),
+ BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
+ }
+ }
+
+ if !pat_adjustments.is_empty() {
+ pat_adjustments.shrink_to_fit();
+ self.result.pat_adjustments.insert(pat, pat_adjustments);
+ }
+ } else if let Pat::Ref { .. } = &self.body[pat] {
+ cov_mark::hit!(match_ergonomics_ref);
+ // When you encounter a `&pat` pattern, reset to Move.
+ // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
+ default_bm = BindingMode::Move;
+ }
+
+ // Lose mutability.
+ let default_bm = default_bm;
+ let expected = expected;
+
+ let ty = match &self.body[pat] {
+ Pat::Tuple { args, ellipsis } => {
+ self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
+ }
+ Pat::Or(pats) => {
+ if let Some((first_pat, rest)) = pats.split_first() {
+ let ty = self.infer_pat(*first_pat, &expected, default_bm);
+ for pat in rest {
+ self.infer_pat(*pat, &expected, default_bm);
+ }
+ ty
+ } else {
+ self.err_ty()
+ }
+ }
+ Pat::Ref { pat, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = match expected.as_reference() {
+ Some((inner_ty, _lifetime, exp_mut)) => {
+ if mutability != exp_mut {
+ // FIXME: emit type error?
+ }
+ inner_ty.clone()
+ }
+ _ => self.result.standard_types.unknown.clone(),
+ };
+ let subty = self.infer_pat(*pat, &expectation, default_bm);
+ TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+ }
+ Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
+ .infer_tuple_struct_pat_like(
+ p.as_deref(),
+ &expected,
+ default_bm,
+ pat,
+ *ellipsis,
+ subpats,
+ ),
+ Pat::Record { path: p, args: fields, ellipsis: _ } => {
+ let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
+ self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+ }
+ Pat::Path(path) => {
+ // FIXME use correct resolver for the surrounding expression
+ let resolver = self.resolver.clone();
+ self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Pat::Bind { mode, name: _, subpat } => {
+ let mode = if mode == &BindingAnnotation::Unannotated {
+ default_bm
+ } else {
+ BindingMode::convert(*mode)
+ };
+ self.result.pat_binding_modes.insert(pat, mode);
+
+ let inner_ty = match subpat {
+ Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
+ None => expected,
+ };
+ let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+ let bound_ty = match mode {
+ BindingMode::Ref(mutability) => {
+ TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
+ .intern(Interner)
+ }
+ BindingMode::Move => inner_ty.clone(),
+ };
+ self.write_pat_ty(pat, bound_ty);
+ return inner_ty;
+ }
+ Pat::Slice { prefix, slice, suffix } => {
+ let elem_ty = match expected.kind(Interner) {
+ TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ for &pat_id in prefix.iter().chain(suffix.iter()) {
+ self.infer_pat(pat_id, &elem_ty, default_bm);
+ }
+
+ if let &Some(slice_pat_id) = slice {
+ let rest_pat_ty = match expected.kind(Interner) {
+ TyKind::Array(_, length) => {
+ let len = match length.data(Interner).value {
+ ConstValue::Concrete(ConcreteConst {
+ interned: ConstScalar::UInt(len),
+ }) => len.checked_sub((prefix.len() + suffix.len()) as u128),
+ _ => None,
+ };
+ TyKind::Array(
+ elem_ty.clone(),
+ intern_const_scalar(
+ len.map_or(ConstScalar::Unknown, ConstScalar::UInt),
+ TyBuilder::usize(),
+ ),
+ )
+ }
+ _ => TyKind::Slice(elem_ty.clone()),
+ }
+ .intern(Interner);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+ }
+
+ match expected.kind(Interner) {
+ TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+ _ => TyKind::Slice(elem_ty),
+ }
+ .intern(Interner)
+ }
+ Pat::Wild => expected.clone(),
+ Pat::Range { start, end } => {
+ let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
+ self.infer_expr(*end, &Expectation::has_type(start_ty))
+ }
+ Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
+ Pat::Box { inner } => match self.resolve_boxed_box() {
+ Some(box_adt) => {
+ let (inner_ty, alloc_ty) = match expected.as_adt() {
+ Some((adt, subst)) if adt == box_adt => (
+ subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
+ subst.as_slice(Interner).get(1).and_then(|a| a.ty(Interner).cloned()),
+ ),
+ _ => (self.result.standard_types.unknown.clone(), None),
+ };
+
+ let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm);
+ let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty);
+
+ if let Some(alloc_ty) = alloc_ty {
+ b = b.push(alloc_ty);
+ }
+ b.fill_with_defaults(self.db, || self.table.new_type_var()).build()
+ }
+ None => self.err_ty(),
+ },
+ Pat::ConstBlock(expr) => {
+ self.infer_expr(*expr, &Expectation::has_type(expected.clone()))
+ }
+ Pat::Missing => self.err_ty(),
+ };
+ // use a new type variable if we got error type here
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, &expected) {
+ self.result
+ .type_mismatches
+ .insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
+ }
+ self.write_pat_ty(pat, ty.clone());
+ ty
+ }
+}
+
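+ /// Returns whether the pattern is a "non-reference pattern" for the purpose
+ /// of match ergonomics: matching such a pattern against a reference
+ /// dereferences the scrutinee and adjusts the default binding mode, e.g.
+ /// (illustrative) in `match &Some(1) { Some(x) => x, None => &0 }` the
+ /// binding `x` gets the type `&i32`.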
+fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
+ // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
+ Pat::Path(..) => true,
+ Pat::ConstBlock(..) => true,
+ Pat::Lit(expr) => !matches!(body[*expr], Expr::Literal(Literal::String(..))),
+ Pat::Bind {
+ mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
+ subpat: Some(subpat),
+ ..
+ } => is_non_ref_pat(body, *subpat),
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
new file mode 100644
index 000000000..f580e09e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -0,0 +1,295 @@
+//! Path expression resolution.
+
+use chalk_ir::cast::Cast;
+use hir_def::{
+ path::{Path, PathSegment},
+ resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ builder::ParamKind,
+ consteval,
+ method_resolution::{self, VisibleFromModule},
+ GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+ ValueTyDefId,
+};
+
+use super::{ExprOrPatId, InferenceContext, TraitRef};
+
+impl<'a> InferenceContext<'a> {
+ pub(super) fn infer_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let ty = self.resolve_value_path(resolver, path, id)?;
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ Some(ty)
+ }
+
+ fn resolve_value_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ if path.segments().is_empty() {
+ // This can't actually happen syntax-wise
+ return None;
+ }
+ let ty = self.make_ty(type_ref);
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
+ let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
+ self.resolve_ty_assoc_item(
+ ty,
+ path.segments().last().expect("path had at least one segment").name,
+ id,
+ )?
+ } else {
+ let value_or_partial =
+ resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it) => (it, None),
+ ResolveValueResult::Partial(def, remaining_index) => {
+ self.resolve_assoc_item(def, path, remaining_index, id)?
+ }
+ }
+ };
+
+ let typable: ValueTyDefId = match value {
+ ValueNs::LocalBinding(pat) => {
+ let ty = self.result.type_of_pat.get(pat)?.clone();
+ return Some(ty);
+ }
+ ValueNs::FunctionId(it) => it.into(),
+ ValueNs::ConstId(it) => it.into(),
+ ValueNs::StaticId(it) => it.into(),
+ ValueNs::StructId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::EnumVariantId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::ImplSelf(impl_id) => {
+ let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let substs = generics.placeholder_subst(self.db);
+ let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
+ if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
+ let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
+ return Some(ty);
+ } else {
+ // FIXME: diagnostic, invalid Self reference
+ return None;
+ }
+ }
+ ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)),
+ };
+
+ let parent_substs = self_subst.unwrap_or_else(|| Substitution::empty(Interner));
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let substs = ctx.substs_from_path(path, typable, true);
+ let mut it = substs.as_slice(Interner)[parent_substs.len(Interner)..].iter().cloned();
+ let ty = TyBuilder::value_ty(self.db, typable)
+ .use_parent_substs(&parent_substs)
+ .fill(|x| {
+ it.next().unwrap_or_else(|| match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
+ })
+ })
+ .build();
+ Some(ty)
+ }
+
+ fn resolve_assoc_item(
+ &mut self,
+ def: TypeNs,
+ path: &Path,
+ remaining_index: usize,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ assert!(remaining_index < path.segments().len());
+ // there may be more intermediate segments between the resolved one and
+ // the end. Only the last segment needs to be resolved to a value; from
+ // the segments before that, we need to get either a type or a trait ref.
+
+ let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
+ let remaining_segments = path.segments().skip(remaining_index);
+ let is_before_last = remaining_segments.len() == 1;
+
+ match (def, is_before_last) {
+ (TypeNs::TraitId(trait_), true) => {
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let trait_ref =
+ ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+ self.resolve_trait_assoc_item(trait_ref, segment, id)
+ }
+ (def, _) => {
+ // Either we already have a type (e.g. `Vec::new`), or we have a
+ // trait but it's not the last segment, so the next segment
+ // should resolve to an associated type of that trait (e.g. `<T
+ // as Iterator>::Item::default`)
+ let remaining_segments_for_ty =
+ remaining_segments.take(remaining_segments.len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let (ty, _) = ctx.lower_partly_resolved_path(
+ def,
+ resolved_segment,
+ remaining_segments_for_ty,
+ true,
+ );
+ if let TyKind::Error = ty.kind(Interner) {
+ return None;
+ }
+
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+
+ self.resolve_ty_assoc_item(ty, segment.name, id)
+ }
+ }
+ }
+
+ fn resolve_trait_assoc_item(
+ &mut self,
+ trait_ref: TraitRef,
+ segment: PathSegment<'_>,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ let trait_ = trait_ref.hir_trait_id();
+ let item =
+ self.db.trait_data(trait_).items.iter().map(|(_name, id)| *id).find_map(|item| {
+ match item {
+ AssocItemId::FunctionId(func) => {
+ if segment.name == &self.db.function_data(func).name {
+ Some(AssocItemId::FunctionId(func))
+ } else {
+ None
+ }
+ }
+
+ AssocItemId::ConstId(konst) => {
+ if self
+ .db
+ .const_data(konst)
+ .name
+ .as_ref()
+ .map_or(false, |n| n == segment.name)
+ {
+ Some(AssocItemId::ConstId(konst))
+ } else {
+ None
+ }
+ }
+ AssocItemId::TypeAliasId(_) => None,
+ }
+ })?;
+ let def = match item {
+ AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
+ AssocItemId::ConstId(c) => ValueNs::ConstId(c),
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, Some(trait_ref.substitution)))
+ }
+
+ fn resolve_ty_assoc_item(
+ &mut self,
+ ty: Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ if let TyKind::Error = ty.kind(Interner) {
+ return None;
+ }
+
+ if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
+ return Some(result);
+ }
+
+ let canonical_ty = self.canonicalize(ty.clone());
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ method_resolution::iterate_method_candidates(
+ &canonical_ty.value,
+ self.db,
+ self.table.trait_env.clone(),
+ &traits_in_scope,
+ VisibleFromModule::Filter(self.resolver.module()),
+ Some(name),
+ method_resolution::LookupMode::Path,
+ move |_ty, item| {
+ let (def, container) = match item {
+ AssocItemId::FunctionId(f) => {
+ (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::ConstId(c) => {
+ (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+ let substs = match container {
+ ItemContainerId::ImplId(impl_id) => {
+ let impl_substs = TyBuilder::subst_for_def(self.db, impl_id)
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ let impl_self_ty =
+ self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
+ self.unify(&impl_self_ty, &ty);
+ Some(impl_substs)
+ }
+ ItemContainerId::TraitId(trait_) => {
+ // we're picking this method
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(ty.clone())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.push_obligation(trait_ref.clone().cast(Interner));
+ Some(trait_ref.substitution)
+ }
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, substs))
+ },
+ )
+ }
+
+ fn resolve_enum_variant_on_ty(
+ &mut self,
+ ty: &Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ let ty = self.resolve_ty_shallow(ty);
+ let (enum_id, subst) = match ty.as_adt() {
+ Some((AdtId::EnumId(e), subst)) => (e, subst),
+ _ => return None,
+ };
+ let enum_data = self.db.enum_data(enum_id);
+ let local_id = enum_data.variant(name)?;
+ let variant = EnumVariantId { parent: enum_id, local_id };
+ self.write_variant_resolution(id, variant.into());
+ Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
new file mode 100644
index 000000000..e77b55670
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -0,0 +1,738 @@
+//! Unification and canonicalization logic.
+
+use std::{fmt, mem, sync::Arc};
+
+use chalk_ir::{
+ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy,
+ IntTy, NoSolution, TyVariableKind, UniverseIndex,
+};
+use chalk_solve::infer::ParameterEnaVariableExt;
+use ena::unify::UnifyKey;
+use hir_expand::name;
+use stdx::never;
+
+use super::{InferOk, InferResult, InferenceContext, TypeError};
+use crate::{
+ db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar,
+ Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment,
+ InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution,
+ Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ &mut self,
+ t: T,
+ ) -> Canonicalized<T> {
+ self.table.canonicalize(t)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Canonicalized<T>
+where
+ T: HasInterner<Interner = Interner>,
+{
+ pub(crate) value: Canonical<T>,
+ free_vars: Vec<GenericArg>,
+}
+
+impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
+ pub(super) fn apply_solution(
+ &self,
+ ctx: &mut InferenceTable<'_>,
+ solution: Canonical<Substitution>,
+ ) {
+ // the solution may contain new variables, which we need to convert to new inference vars
+ let new_vars = Substitution::from_iter(
+ Interner,
+ solution.binders.iter(Interner).map(|k| match &k.kind {
+ VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner),
+ VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner),
+ VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
+ // Chalk can sometimes return new lifetime variables. We just use the static lifetime everywhere
+ VariableKind::Lifetime => static_lifetime().cast(Interner),
+ VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
+ }),
+ );
+ for (i, v) in solution.value.iter(Interner).enumerate() {
+ let var = self.free_vars[i].clone();
+ if let Some(ty) = v.ty(Interner) {
+ // eagerly replace projections in the type; we may be getting types
+ // e.g. from where clauses where this hasn't happened yet
+ let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner));
+ ctx.unify(var.assert_ty_ref(Interner), &ty);
+ } else {
+ let _ = ctx.try_unify(&var, &new_vars.apply(v.clone(), Interner));
+ }
+ }
+ }
+}
+
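+ /// Returns whether the two canonicalized types can be unified in the given
+ /// trait environment, without retaining any inference results.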
+pub fn could_unify(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ unify(db, env, tys).is_some()
+}
+
+pub(crate) fn unify(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> Option<Substitution> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = Substitution::from_iter(
+ Interner,
+ tys.binders.iter(Interner).map(|x| match &x.kind {
+ chalk_ir::VariableKind::Ty(_) => {
+ GenericArgData::Ty(table.new_type_var()).intern(Interner)
+ }
+ chalk_ir::VariableKind::Lifetime => {
+ GenericArgData::Ty(table.new_type_var()).intern(Interner)
+ } // FIXME: maybe wrong?
+ chalk_ir::VariableKind::Const(ty) => {
+ GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
+ }
+ }),
+ );
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ if !table.unify(&ty1_with_vars, &ty2_with_vars) {
+ return None;
+ }
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+ };
+ Some(Substitution::from_iter(
+ Interner,
+ vars.iter(Interner).map(|v| table.resolve_with_fallback(v.clone(), &fallback)),
+ ))
+}
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct TypeVariableData {
+ diverging: bool,
+}
+
+type ChalkInferenceTable = chalk_solve::infer::InferenceTable<Interner>;
+
+#[derive(Clone)]
+pub(crate) struct InferenceTable<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) trait_env: Arc<TraitEnvironment>,
+ var_unification_table: ChalkInferenceTable,
+ type_variable_table: Vec<TypeVariableData>,
+ pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+}
+
+pub(crate) struct InferenceTableSnapshot {
+ var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>,
+ pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+ type_variable_table_snapshot: Vec<TypeVariableData>,
+}
+
+impl<'a> InferenceTable<'a> {
+ pub(crate) fn new(db: &'a dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
+ InferenceTable {
+ db,
+ trait_env,
+ var_unification_table: ChalkInferenceTable::new(),
+ type_variable_table: Vec::new(),
+ pending_obligations: Vec::new(),
+ }
+ }
+
+ /// Chalk doesn't know about the `diverging` flag, so when it unifies two
+ /// type variables of which one is diverging, the chosen root might not be
+ /// diverging and we have no way of marking it as such at that time. This
+ /// function goes through all type variables and makes sure their root is
+ /// marked as diverging if necessary, so that resolving them gives the right
+ /// result.
+ pub(super) fn propagate_diverging_flag(&mut self) {
+ for i in 0..self.type_variable_table.len() {
+ if !self.type_variable_table[i].diverging {
+ continue;
+ }
+ let v = InferenceVar::from(i as u32);
+ let root = self.var_unification_table.inference_var_root(v);
+ if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) {
+ data.diverging = true;
+ }
+ }
+ }
+
+ pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) {
+ self.type_variable_table[iv.index() as usize].diverging = diverging;
+ }
+
+ fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
+ match kind {
+ _ if self
+ .type_variable_table
+ .get(iv.index() as usize)
+ .map_or(false, |data| data.diverging) =>
+ {
+ TyKind::Never
+ }
+ TyVariableKind::General => TyKind::Error,
+ TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)),
+ TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)),
+ }
+ .intern(Interner)
+ }
+
+ pub(crate) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ &mut self,
+ t: T,
+ ) -> Canonicalized<T> {
+ // try to resolve obligations before canonicalizing, since this might
+ // result in new knowledge about variables
+ self.resolve_obligations_as_possible();
+ let result = self.var_unification_table.canonicalize(Interner, t);
+ let free_vars = result
+ .free_vars
+ .into_iter()
+ .map(|free_var| free_var.to_generic_arg(Interner))
+ .collect();
+ Canonicalized { value: result.quantified, free_vars }
+ }
+
+ /// Recurses through the given type, normalizing associated types mentioned
+ /// in it by replacing them with type variables and registering obligations to
+ /// resolve later. This should be done once for every type we get from some
+ /// type annotation (e.g. from a let type annotation, field type or function
+ /// call). `make_ty` handles this already, but e.g. for field types we need
+ /// to do it as well.
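+ ///
+ /// For example (illustrative), `<T as Iterator>::Item` is replaced with a
+ /// fresh type variable `?x` plus the registered obligation
+ /// `<T as Iterator>::Item == ?x`.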
+ pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+ fold_tys(
+ ty,
+ |ty, _| match ty.kind(Interner) {
+ TyKind::Alias(AliasTy::Projection(proj_ty)) => {
+ self.normalize_projection_ty(proj_ty.clone())
+ }
+ _ => ty,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+
+ pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
+ let var = self.new_type_var();
+ let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() };
+ let obligation = alias_eq.cast(Interner);
+ self.register_obligation(obligation);
+ var
+ }
+
+ fn extend_type_variable_table(&mut self, to_index: usize) {
+ self.type_variable_table.extend(
+ (0..1 + to_index - self.type_variable_table.len())
+ .map(|_| TypeVariableData { diverging: false }),
+ );
+ }
+
+ fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ // Chalk might have created some type variables for its own purposes that we don't know about...
+ self.extend_type_variable_table(var.index() as usize);
+ assert_eq!(var.index() as usize, self.type_variable_table.len() - 1);
+ self.type_variable_table[var.index() as usize].diverging = diverging;
+ var.to_ty_with_kind(Interner, kind)
+ }
+
+ pub(crate) fn new_type_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::General, false)
+ }
+
+ pub(crate) fn new_integer_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::Integer, false)
+ }
+
+ pub(crate) fn new_float_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::Float, false)
+ }
+
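+ /// Creates a diverging type variable: if it never gets unified with
+ /// anything, it falls back to `!` (see `fallback_value`) instead of an
+ /// error type.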
+ pub(crate) fn new_maybe_never_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::General, true)
+ }
+
+ pub(crate) fn new_const_var(&mut self, ty: Ty) -> Const {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ var.to_const(Interner, ty)
+ }
+
+ pub(crate) fn new_lifetime_var(&mut self) -> Lifetime {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ var.to_lifetime(Interner)
+ }
+
+ pub(crate) fn resolve_with_fallback<T>(
+ &mut self,
+ t: T,
+ fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+ ) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback)
+ }
+
+ pub(crate) fn fresh_subst(&mut self, binders: &[CanonicalVarKind<Interner>]) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ binders.iter().map(|kind| {
+ let param_infer_var =
+ kind.map_ref(|&ui| self.var_unification_table.new_variable(ui));
+ param_infer_var.to_generic_arg(Interner)
+ }),
+ )
+ }
+
+ pub(crate) fn instantiate_canonical<T>(&mut self, canonical: Canonical<T>) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + std::fmt::Debug,
+ {
+ let subst = self.fresh_subst(canonical.binders.as_slice(Interner));
+ subst.apply(canonical.value, Interner)
+ }
+
+ fn resolve_with_fallback_inner<T>(
+ &mut self,
+ var_stack: &mut Vec<InferenceVar>,
+ t: T,
+ fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+ ) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ t.fold_with(
+ &mut resolve::Resolver { table: self, var_stack, fallback },
+ DebruijnIndex::INNERMOST,
+ )
+ .expect("fold failed unexpectedly")
+ }
+
+ pub(crate) fn resolve_completely<T>(&mut self, t: T) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ self.resolve_with_fallback(t, &|_, _, d, _| d)
+ }
+
+ /// Unifies two types and registers any new trait goals that arise from that.
+ pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+ let result = match self.try_unify(ty1, ty2) {
+ Ok(r) => r,
+ Err(_) => return false,
+ };
+ self.register_infer_ok(result);
+ true
+ }
+
+ /// Unifies two types and returns the new trait goals arising from it; the
+ /// caller needs to deal with them.
+ pub(crate) fn try_unify<T: Zip<Interner>>(&mut self, t1: &T, t2: &T) -> InferResult<()> {
+ match self.var_unification_table.relate(
+ Interner,
+ &self.db,
+ &self.trait_env.env,
+ chalk_ir::Variance::Invariant,
+ t1,
+ t2,
+ ) {
+ Ok(result) => Ok(InferOk { goals: result.goals, value: () }),
+ Err(chalk_ir::NoSolution) => Err(TypeError),
+ }
+ }
+
+ /// If `ty` is a type variable with known type, returns that type;
+ /// otherwise, returns `ty`.
+ pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
+ self.resolve_obligations_as_possible();
+ self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone())
+ }
+
+ pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot {
+ let var_table_snapshot = self.var_unification_table.snapshot();
+ let type_variable_table_snapshot = self.type_variable_table.clone();
+ let pending_obligations = self.pending_obligations.clone();
+ InferenceTableSnapshot {
+ var_table_snapshot,
+ pending_obligations,
+ type_variable_table_snapshot,
+ }
+ }
+
+ pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) {
+ self.var_unification_table.rollback_to(snapshot.var_table_snapshot);
+ self.type_variable_table = snapshot.type_variable_table_snapshot;
+ self.pending_obligations = snapshot.pending_obligations;
+ }
+
+ pub(crate) fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T {
+ let snapshot = self.snapshot();
+ let result = f(self);
+ self.rollback_to(snapshot);
+ result
+ }
+
+ /// Checks an obligation without registering it. Useful mostly to check
+ /// whether a trait *might* be implemented before deciding to 'lock in' the
+ /// choice (during e.g. method resolution or deref).
+ pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option<Solution> {
+ let in_env = InEnvironment::new(&self.trait_env.env, goal);
+ let canonicalized = self.canonicalize(in_env);
+ self.db.trait_solve(self.trait_env.krate, canonicalized.value)
+ }
+
+ pub(crate) fn register_obligation(&mut self, goal: Goal) {
+ let in_env = InEnvironment::new(&self.trait_env.env, goal);
+ self.register_obligation_in_env(in_env)
+ }
+
+ fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
+ let canonicalized = self.canonicalize(goal);
+ if !self.try_resolve_obligation(&canonicalized) {
+ self.pending_obligations.push(canonicalized);
+ }
+ }
+
+ pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<T>) {
+ infer_ok.goals.into_iter().for_each(|goal| self.register_obligation_in_env(goal));
+ }
+
+ pub(crate) fn resolve_obligations_as_possible(&mut self) {
+ let _span = profile::span("resolve_obligations_as_possible");
+ let mut changed = true;
+ let mut obligations = Vec::new();
+ while changed {
+ changed = false;
+ mem::swap(&mut self.pending_obligations, &mut obligations);
+ for canonicalized in obligations.drain(..) {
+ if !self.check_changed(&canonicalized) {
+ self.pending_obligations.push(canonicalized);
+ continue;
+ }
+ changed = true;
+ let uncanonical = chalk_ir::Substitute::apply(
+ &canonicalized.free_vars,
+ canonicalized.value.value,
+ Interner,
+ );
+ self.register_obligation_in_env(uncanonical);
+ }
+ }
+ }
+
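+ /// Runs `f` inside a snapshot that is rolled back afterwards, then replaces
+ /// any inference variables `f` created (those above the recorded
+ /// high-water-mark variable) with fresh ones in the returned value, so the
+ /// result can be used without `f`'s speculative bindings leaking out.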
+ pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>(
+ &mut self,
+ f: impl FnOnce(&mut Self) -> T,
+ ) -> T {
+ use chalk_ir::fold::TypeFolder;
+ struct VarFudger<'a, 'b> {
+ table: &'a mut InferenceTable<'b>,
+ highest_known_var: InferenceVar,
+ }
+ impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ var: chalk_ir::InferenceVar,
+ kind: TyVariableKind,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Ty<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_ty(Interner, kind)
+ } else {
+ self.table.new_type_var()
+ })
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ var: chalk_ir::InferenceVar,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Lifetime<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_lifetime(Interner)
+ } else {
+ self.table.new_lifetime_var()
+ })
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: chalk_ir::Ty<Interner>,
+ var: chalk_ir::InferenceVar,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Const<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_const(Interner, ty)
+ } else {
+ self.table.new_const_var(ty)
+ })
+ }
+ }
+
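+ // Take a snapshot, remember the first variable index allocated inside it,
+ // run `f`, roll everything back, and then fudge the result: any variable
+ // at or above `highest_known_var` must have been created by `f`.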
+ let snapshot = self.snapshot();
+ let highest_known_var = self.new_type_var().inference_var(Interner).expect("inference_var");
+ let result = f(self);
+ self.rollback_to(snapshot);
+ result
+ .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST)
+ .expect("fold_with with VarFudger")
+ }
+
+ /// Checks whether any of the free variables in `canonicalized` have
+ /// changed (either been unified with another variable, or with a concrete
+ /// value). If not, we don't need to try to solve the goal again -- it
+ /// would give the same result as last time.
+ fn check_changed(&mut self, canonicalized: &Canonicalized<InEnvironment<Goal>>) -> bool {
+ canonicalized.free_vars.iter().any(|var| {
+ let iv = match var.data(Interner) {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ }
+ .expect("free var is not inference var");
+ if self.var_unification_table.probe_var(iv).is_some() {
+ return true;
+ }
+ let root = self.var_unification_table.inference_var_root(iv);
+ iv != root
+ })
+ }
+
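+ /// Tries to solve a canonicalized obligation. Returns `true` if the
+ /// obligation can be discarded (solved uniquely, or currently
+ /// unprovable), and `false` if it should stay in the pending list.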
+ fn try_resolve_obligation(
+ &mut self,
+ canonicalized: &Canonicalized<InEnvironment<Goal>>,
+ ) -> bool {
+ let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value.clone());
+
+ match solution {
+ Some(Solution::Unique(canonical_subst)) => {
+ canonicalized.apply_solution(
+ self,
+ Canonical {
+ binders: canonical_subst.binders,
+ // FIXME: handle constraints
+ value: canonical_subst.value.subst,
+ },
+ );
+ true
+ }
+ Some(Solution::Ambig(Guidance::Definite(substs))) => {
+ canonicalized.apply_solution(self, substs);
+ false
+ }
+ Some(_) => {
+ // FIXME use this when trying to resolve everything at the end
+ false
+ }
+ None => {
+ // FIXME obligation cannot be fulfilled => diagnostic
+ true
+ }
+ }
+ }
+
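+ /// Returns the argument types and return type with which `ty` can be
+ /// called, either from its direct callable signature or via an `FnOnce`
+ /// implementation.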
+ pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+ match ty.callable_sig(self.db) {
+ Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
+ None => self.callable_sig_from_fn_trait(ty, num_args),
+ }
+ }
+
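+ /// Checks whether `ty` implements `FnOnce` for a fresh tuple of `num_args`
+ /// inference variables and, if so, returns those argument variables
+ /// together with the normalized `Output` type.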
+ fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+ let krate = self.trait_env.krate;
+ let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
+ let output_assoc_type =
+ self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+
+ let mut arg_tys = vec![];
+ let arg_ty = TyBuilder::tuple(num_args)
+ .fill(|x| {
+ let arg = match x {
+ ParamKind::Type => self.new_type_var(),
+ ParamKind::Const(ty) => {
+ never!("Tuple with const parameter");
+ return GenericArgData::Const(self.new_const_var(ty.clone()))
+ .intern(Interner);
+ }
+ };
+ arg_tys.push(arg.clone());
+ GenericArgData::Ty(arg).intern(Interner)
+ })
+ .build();
+
+ let projection = {
+ let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type);
+ if b.remaining() != 2 {
+ return None;
+ }
+ b.push(ty.clone()).push(arg_ty).build()
+ };
+
+ let trait_env = self.trait_env.env.clone();
+ let obligation = InEnvironment {
+ goal: projection.trait_ref(self.db).cast(Interner),
+ environment: trait_env,
+ };
+ let canonical = self.canonicalize(obligation.clone());
+ if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() {
+ self.register_obligation(obligation.goal);
+ let return_ty = self.normalize_projection_ty(projection);
+ Some((arg_tys, return_ty))
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a> fmt::Debug for InferenceTable<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
+ }
+}
+
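+/// Full resolution of inference variables: substitutes known variables with
+/// their resolved values and delegates unresolved or cyclic ones to a
+/// caller-supplied fallback.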
+mod resolve {
+ use super::InferenceTable;
+ use crate::{
+ ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
+ Interner, Lifetime, Ty, TyVariableKind, VariableKind,
+ };
+ use chalk_ir::{
+ cast::Cast,
+ fold::{TypeFoldable, TypeFolder},
+ Fallible, NoSolution,
+ };
+ use hir_def::type_ref::ConstScalar;
+
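+ /// A `TypeFolder` that substitutes known inference variables with their
+ /// resolved values, tracking visited variables in `var_stack` to cut off
+ /// recursive types, and delegating unresolved variables to `fallback`.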
+ pub(super) struct Resolver<'a, 'b, F> {
+ pub(super) table: &'a mut InferenceTable<'b>,
+ pub(super) var_stack: &'a mut Vec<InferenceVar>,
+ pub(super) fallback: F,
+ }
+ impl<'a, 'b, 'i, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+ where
+ F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg + 'i,
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ var: InferenceVar,
+ kind: TyVariableKind,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ let var = self.table.var_unification_table.inference_var_root(var);
+ if self.var_stack.contains(&var) {
+ // recursive type
+ let default = self.table.fallback_value(var, kind).cast(Interner);
+ return Ok((self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+ .assert_ty_ref(Interner)
+ .clone());
+ }
+ let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+ // known_ty may contain other variables that are known by now
+ self.var_stack.push(var);
+ let result =
+ known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+ self.var_stack.pop();
+ result.assert_ty_ref(Interner).clone()
+ } else {
+ let default = self.table.fallback_value(var, kind).cast(Interner);
+ (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+ .assert_ty_ref(Interner)
+ .clone()
+ };
+ Ok(result)
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: Ty,
+ var: InferenceVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ let var = self.table.var_unification_table.inference_var_root(var);
+ let default = ConstData {
+ ty: ty.clone(),
+ value: ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Unknown }),
+ }
+ .intern(Interner)
+ .cast(Interner);
+ if self.var_stack.contains(&var) {
+ // recursive const, analogous to the recursive-type case above
+ return Ok((self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+ .assert_const_ref(Interner)
+ .clone());
+ }
+ let result = if let Some(known_const) = self.table.var_unification_table.probe_var(var) {
+ // known_const may contain other variables that are known by now
+ self.var_stack.push(var);
+ let result =
+ known_const.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+ self.var_stack.pop();
+ result.assert_const_ref(Interner).clone()
+ } else {
+ (self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+ .assert_const_ref(Interner)
+ .clone()
+ };
+ Ok(result)
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ // fall back all lifetimes to 'static -- currently we don't deal
+ // with any lifetimes, but we can sometimes get some lifetime
+ // variables through Chalk's unification, and this at least makes
+ // sure we don't leak them outside of inference
+ Ok(crate::static_lifetime())
+ }
+ }
+}