summaryrefslogtreecommitdiffstats
path: root/src/tools/rust-analyzer/crates/hir-ty/src
diff options
context:
space:
mode:
author: Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-17 12:02:58 +0000
committer: Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-17 12:02:58 +0000
commit698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree173a775858bd501c378080a10dca74132f05bc50 /src/tools/rust-analyzer/crates/hir-ty/src
parentInitial commit. (diff)
downloadrustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz
rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip
Adding upstream version 1.64.0+dfsg1. (tag: upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir-ty/src')
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs145
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/builder.rs311
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs799
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs358
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs469
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs148
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs225
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs701
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs199
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs416
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs508
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs1094
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs56
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs811
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs104
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs1315
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs1088
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs82
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs673
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs1527
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs354
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs295
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs738
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/interner.rs432
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs525
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs1778
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs148
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs1186
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs62
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs150
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs578
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs755
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs75
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs176
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs1338
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs1792
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs485
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs991
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs1650
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs3072
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs3782
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tls.rs133
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs187
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/utils.rs408
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/walk.rs147
47 files changed, 32330 insertions, 0 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
new file mode 100644
index 000000000..b6f226dbf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -0,0 +1,145 @@
+//! In certain situations, rust automatically inserts derefs as necessary: for
+//! example, field accesses `foo.bar` still work when `foo` is actually a
+//! reference to a type with the field `bar`. This is an approximation of the
+//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
+
+use std::sync::Arc;
+
+use chalk_ir::cast::Cast;
+use hir_expand::name::name;
+use limit::Limit;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt,
+ TraitEnvironment, Ty, TyBuilder, TyKind,
+};
+
+static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10);
+
/// How a single autoderef step was performed.
pub(crate) enum AutoderefKind {
    /// Built-in dereference of a reference or raw pointer (`&T`, `&mut T`, `*const T`, `*mut T`).
    Builtin,
    /// Dereference that goes through the `Deref` lang trait (see `deref_by_trait`).
    Overloaded,
}
+
/// Step-by-step autoderef walker over a starting type; drive it via its
/// `Iterator` impl below.
pub(crate) struct Autoderef<'a, 'db> {
    /// Inference table used to resolve type variables and to prove/normalize
    /// `Deref` projections during overloaded steps.
    pub(crate) table: &'a mut InferenceTable<'db>,
    // The current type, i.e. the result after all steps recorded so far.
    ty: Ty,
    // True until the iterator has yielded the initial type (step 0).
    at_start: bool,
    // One entry per deref performed: the step kind and the type *before* that step.
    steps: Vec<(AutoderefKind, Ty)>,
}
+
+impl<'a, 'db> Autoderef<'a, 'db> {
+ pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self {
+ let ty = table.resolve_ty_shallow(&ty);
+ Autoderef { table, ty, at_start: true, steps: Vec::new() }
+ }
+
+ pub(crate) fn step_count(&self) -> usize {
+ self.steps.len()
+ }
+
+ pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] {
+ &self.steps
+ }
+
+ pub(crate) fn final_ty(&self) -> Ty {
+ self.ty.clone()
+ }
+}
+
+impl Iterator for Autoderef<'_, '_> {
+ type Item = (Ty, usize);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.at_start {
+ self.at_start = false;
+ return Some((self.ty.clone(), 0));
+ }
+
+ if AUTODEREF_RECURSION_LIMIT.check(self.steps.len() + 1).is_err() {
+ return None;
+ }
+
+ let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?;
+
+ self.steps.push((kind, self.ty.clone()));
+ self.ty = new_ty;
+
+ Some((self.ty.clone(), self.step_count()))
+ }
+}
+
+pub(crate) fn autoderef_step(
+ table: &mut InferenceTable<'_>,
+ ty: Ty,
+) -> Option<(AutoderefKind, Ty)> {
+ if let Some(derefed) = builtin_deref(&ty) {
+ Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
+ } else {
+ Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
+ }
+}
+
+// FIXME: replace uses of this with Autoderef above
+pub fn autoderef<'a>(
+ db: &'a dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ty: Canonical<Ty>,
+) -> impl Iterator<Item = Canonical<Ty>> + 'a {
+ let mut table = InferenceTable::new(db, env);
+ let ty = table.instantiate_canonical(ty);
+ let mut autoderef = Autoderef::new(&mut table, ty);
+ let mut v = Vec::new();
+ while let Some((ty, _steps)) = autoderef.next() {
+ v.push(autoderef.table.canonicalize(ty).value);
+ }
+ v.into_iter()
+}
+
+pub(crate) fn deref(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
+ let _p = profile::span("deref");
+ autoderef_step(table, ty).map(|(_, ty)| ty)
+}
+
+fn builtin_deref(ty: &Ty) -> Option<&Ty> {
+ match ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(ty),
+ TyKind::Raw(.., ty) => Some(ty),
+ _ => None,
+ }
+}
+
/// Attempts a deref step through the `Deref` lang trait: builds the projection
/// `<ty as Deref>::Target`, checks that `ty` can implement `Deref`, registers
/// the obligation, and returns the normalized target type.
fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
    let _p = profile::span("deref_by_trait");
    if table.resolve_ty_shallow(&ty).inference_var(Interner).is_some() {
        // don't try to deref unknown variables
        return None;
    }

    let db = table.db;
    // Resolve the `Deref` trait and its `Target` associated type via lang items.
    let deref_trait = db
        .lang_item(table.trait_env.krate, SmolStr::new_inline("deref"))
        .and_then(|l| l.as_trait())?;
    let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;

    // Build the projection `<ty as Deref>::Target` with `ty` as the only argument.
    let projection = {
        let b = TyBuilder::assoc_type_projection(db, target);
        if b.remaining() != 1 {
            // the Target type + Deref trait should only have one generic parameter,
            // namely Deref's Self type
            return None;
        }
        b.push(ty).build()
    };

    // Check that the type implements Deref at all
    let trait_ref = projection.trait_ref(db);
    let implements_goal: Goal = trait_ref.cast(Interner);
    // Bail out (without side effects) if the goal cannot hold at all...
    table.try_obligation(implements_goal.clone())?;

    // ...then actually record the obligation so inference takes it into account.
    table.register_obligation(implements_goal);

    let result = table.normalize_projection_ty(projection);
    Some(table.resolve_ty_shallow(&result))
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
new file mode 100644
index 000000000..94d7806cb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -0,0 +1,311 @@
+//! `TyBuilder`, a helper for building instances of `Ty` and related types.
+
+use std::iter;
+
+use chalk_ir::{
+ cast::{Cast, CastTo, Caster},
+ fold::TypeFoldable,
+ interner::HasInterner,
+ AdtId, BoundVar, DebruijnIndex, Scalar,
+};
+use hir_def::{
+ builtin_type::BuiltinType, generics::TypeOrConstParamData, ConstParamId, GenericDefId, TraitId,
+ TypeAliasId,
+};
+use smallvec::SmallVec;
+
+use crate::{
+ consteval::unknown_const_as_generic, db::HirDatabase, infer::unify::InferenceTable, primitive,
+ to_assoc_type_id, to_chalk_trait_id, utils::generics, Binders, CallableSig, ConstData,
+ ConstValue, GenericArg, GenericArgData, Interner, ProjectionTy, Substitution, TraitRef, Ty,
+ TyDefId, TyExt, TyKind, ValueTyDefId,
+};
+
/// The kind of generic argument a `TyBuilder` still expects at a given
/// position: a type, or a const together with the type of that constant.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParamKind {
    Type,
    Const(Ty),
}
+
/// This is a builder for `Ty` or anything that needs a `Substitution`.
pub struct TyBuilder<D> {
    /// The `data` field is used to keep track of what we're building (e.g. an
    /// ADT, a `TraitRef`, ...).
    data: D,
    // Arguments collected so far; must end up matching `param_kinds` one-to-one.
    vec: SmallVec<[GenericArg; 2]>,
    // Expected kind of each argument, in declaration order.
    param_kinds: SmallVec<[ParamKind; 2]>,
}
+
+impl<A> TyBuilder<A> {
+ fn with_data<B>(self, data: B) -> TyBuilder<B> {
+ TyBuilder { data, param_kinds: self.param_kinds, vec: self.vec }
+ }
+}
+
impl<D> TyBuilder<D> {
    /// Creates a builder expecting one argument per entry of `param_kinds`.
    fn new(data: D, param_kinds: SmallVec<[ParamKind; 2]>) -> TyBuilder<D> {
        TyBuilder { data, vec: SmallVec::with_capacity(param_kinds.len()), param_kinds }
    }

    /// Consumes the builder, asserting that every expected parameter received
    /// an argument of the matching kind, and returns the payload together with
    /// the finished `Substitution`.
    fn build_internal(self) -> (D, Substitution) {
        assert_eq!(self.vec.len(), self.param_kinds.len());
        for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
            self.assert_match_kind(a, e);
        }
        let subst = Substitution::from_iter(Interner, self.vec);
        (self.data, subst)
    }

    /// Appends one generic argument.
    ///
    /// Panics if the argument's kind does not match the next expected
    /// parameter kind, or if a lifetime is pushed (never expected here).
    pub fn push(mut self, arg: impl CastTo<GenericArg>) -> Self {
        let arg = arg.cast(Interner);
        let expected_kind = &self.param_kinds[self.vec.len()];
        let arg_kind = match arg.data(Interner) {
            chalk_ir::GenericArgData::Ty(_) => ParamKind::Type,
            chalk_ir::GenericArgData::Lifetime(_) => panic!("Got lifetime in TyBuilder::push"),
            chalk_ir::GenericArgData::Const(c) => {
                let c = c.data(Interner);
                ParamKind::Const(c.ty.clone())
            }
        };
        assert_eq!(*expected_kind, arg_kind);
        self.vec.push(arg);
        self
    }

    /// Number of parameters still awaiting an argument.
    pub fn remaining(&self) -> usize {
        self.param_kinds.len() - self.vec.len()
    }

    /// Fills all remaining parameters with bound variables at `debruijn`,
    /// indexed consecutively starting from `starting_from`.
    pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self {
        // self.fill is inlined to make borrow checker happy
        let mut this = self;
        let other = this.param_kinds.iter().skip(this.vec.len());
        let filler = (starting_from..).zip(other).map(|(idx, kind)| match kind {
            ParamKind::Type => {
                GenericArgData::Ty(TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner))
                    .intern(Interner)
            }
            ParamKind::Const(ty) => GenericArgData::Const(
                ConstData {
                    value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
                    ty: ty.clone(),
                }
                .intern(Interner),
            )
            .intern(Interner),
        });
        this.vec.extend(filler.take(this.remaining()).casted(Interner));
        assert_eq!(this.remaining(), 0);
        this
    }

    /// Fills all remaining parameters with the error type (for types) or an
    /// unknown const (for consts).
    pub fn fill_with_unknown(self) -> Self {
        // self.fill is inlined to make borrow checker happy
        let mut this = self;
        let filler = this.param_kinds.iter().skip(this.vec.len()).map(|x| match x {
            ParamKind::Type => GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner),
            ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
        });
        this.vec.extend(filler.casted(Interner));
        assert_eq!(this.remaining(), 0);
        this
    }

    /// Fills all remaining parameters with fresh inference variables from `table`.
    pub(crate) fn fill_with_inference_vars(self, table: &mut InferenceTable<'_>) -> Self {
        self.fill(|x| match x {
            ParamKind::Type => GenericArgData::Ty(table.new_type_var()).intern(Interner),
            ParamKind::Const(ty) => {
                GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
            }
        })
    }

    /// Fills all remaining parameters by invoking `filler` once per parameter,
    /// in order.
    pub fn fill(mut self, filler: impl FnMut(&ParamKind) -> GenericArg) -> Self {
        self.vec.extend(self.param_kinds.iter().skip(self.vec.len()).map(filler));
        assert_eq!(self.remaining(), 0);
        self
    }

    /// Seeds the (still empty) builder with the arguments of a parent
    /// substitution; each argument is kind-checked against the expected
    /// parameters.
    pub fn use_parent_substs(mut self, parent_substs: &Substitution) -> Self {
        assert!(self.vec.is_empty());
        assert!(parent_substs.len(Interner) <= self.param_kinds.len());
        self.extend(parent_substs.iter(Interner).cloned());
        self
    }

    // Appends arguments, kind-checking each against the corresponding expected
    // parameter first.
    fn extend(&mut self, it: impl Iterator<Item = GenericArg> + Clone) {
        for x in it.clone().zip(self.param_kinds.iter().skip(self.vec.len())) {
            self.assert_match_kind(&x.0, &x.1);
        }
        self.vec.extend(it);
    }

    // Panics unless argument `a` is of the kind `e` expects.
    fn assert_match_kind(&self, a: &chalk_ir::GenericArg<Interner>, e: &ParamKind) {
        match (a.data(Interner), e) {
            (chalk_ir::GenericArgData::Ty(_), ParamKind::Type)
            | (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (),
            _ => panic!("Mismatched kinds: {:?}, {:?}, {:?}", a, self.vec, self.param_kinds),
        }
    }
}
+
+impl TyBuilder<()> {
+ pub fn unit() -> Ty {
+ TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner)
+ }
+
+ pub fn usize() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner)
+ }
+
+ pub fn fn_ptr(sig: CallableSig) -> Ty {
+ TyKind::Function(sig.to_fn_ptr()).intern(Interner)
+ }
+
+ pub fn builtin(builtin: BuiltinType) -> Ty {
+ match builtin {
+ BuiltinType::Char => TyKind::Scalar(Scalar::Char).intern(Interner),
+ BuiltinType::Bool => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ BuiltinType::Str => TyKind::Str.intern(Interner),
+ BuiltinType::Int(t) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(t))).intern(Interner)
+ }
+ BuiltinType::Uint(t) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(t))).intern(Interner)
+ }
+ BuiltinType::Float(t) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(t))).intern(Interner)
+ }
+ }
+ }
+
+ pub fn slice(argument: Ty) -> Ty {
+ TyKind::Slice(argument).intern(Interner)
+ }
+
+ pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
+ let params = generics(db.upcast(), def.into());
+ params.placeholder_subst(db)
+ }
+
+ pub fn subst_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> TyBuilder<()> {
+ let def = def.into();
+ let params = generics(db.upcast(), def);
+ TyBuilder::new(
+ (),
+ params
+ .iter()
+ .map(|(id, data)| match data {
+ TypeOrConstParamData::TypeParamData(_) => ParamKind::Type,
+ TypeOrConstParamData::ConstParamData(_) => {
+ ParamKind::Const(db.const_param_ty(ConstParamId::from_unchecked(id)))
+ }
+ })
+ .collect(),
+ )
+ }
+
+ pub fn build(self) -> Substitution {
+ let ((), subst) = self.build_internal();
+ subst
+ }
+}
+
impl TyBuilder<hir_def::AdtId> {
    /// Starts building an ADT type; expects one argument per generic
    /// parameter of `def`.
    pub fn adt(db: &dyn HirDatabase, def: hir_def::AdtId) -> TyBuilder<hir_def::AdtId> {
        TyBuilder::subst_for_def(db, def).with_data(def)
    }

    /// Fills the remaining parameters with the ADT's declared defaults,
    /// substituting the already-collected arguments into each default;
    /// parameters whose default is unknown get `fallback()` instead.
    pub fn fill_with_defaults(
        mut self,
        db: &dyn HirDatabase,
        mut fallback: impl FnMut() -> Ty,
    ) -> Self {
        let defaults = db.generic_defaults(self.data.into());
        for default_ty in defaults.iter().skip(self.vec.len()) {
            // An unknown default is replaced by the caller-provided fallback type.
            if let GenericArgData::Ty(x) = default_ty.skip_binders().data(Interner) {
                if x.is_unknown() {
                    self.vec.push(fallback().cast(Interner));
                    continue;
                }
            };
            // each default can depend on the previous parameters
            let subst_so_far = Substitution::from_iter(Interner, self.vec.clone());
            self.vec.push(default_ty.clone().substitute(Interner, &subst_so_far).cast(Interner));
        }
        self
    }

    /// Finishes the builder, producing the ADT type.
    pub fn build(self) -> Ty {
        let (adt, subst) = self.build_internal();
        TyKind::Adt(AdtId(adt), subst).intern(Interner)
    }
}
+
+pub struct Tuple(usize);
+impl TyBuilder<Tuple> {
+ pub fn tuple(size: usize) -> TyBuilder<Tuple> {
+ TyBuilder::new(Tuple(size), iter::repeat(ParamKind::Type).take(size).collect())
+ }
+
+ pub fn build(self) -> Ty {
+ let (Tuple(size), subst) = self.build_internal();
+ TyKind::Tuple(size, subst).intern(Interner)
+ }
+}
+
+impl TyBuilder<TraitId> {
+ pub fn trait_ref(db: &dyn HirDatabase, def: TraitId) -> TyBuilder<TraitId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn build(self) -> TraitRef {
+ let (trait_id, substitution) = self.build_internal();
+ TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution }
+ }
+}
+
+impl TyBuilder<TypeAliasId> {
+ pub fn assoc_type_projection(db: &dyn HirDatabase, def: TypeAliasId) -> TyBuilder<TypeAliasId> {
+ TyBuilder::subst_for_def(db, def).with_data(def)
+ }
+
+ pub fn build(self) -> ProjectionTy {
+ let (type_alias, substitution) = self.build_internal();
+ ProjectionTy { associated_ty_id: to_assoc_type_id(type_alias), substitution }
+ }
+}
+
+impl<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>> TyBuilder<Binders<T>> {
+ fn subst_binders(b: Binders<T>) -> Self {
+ let param_kinds = b
+ .binders
+ .iter(Interner)
+ .map(|x| match x {
+ chalk_ir::VariableKind::Ty(_) => ParamKind::Type,
+ chalk_ir::VariableKind::Lifetime => panic!("Got lifetime parameter"),
+ chalk_ir::VariableKind::Const(ty) => ParamKind::Const(ty.clone()),
+ })
+ .collect();
+ TyBuilder::new(b, param_kinds)
+ }
+
+ pub fn build(self) -> T {
+ let (b, subst) = self.build_internal();
+ b.substitute(Interner, &subst)
+ }
+}
+
+impl TyBuilder<Binders<Ty>> {
+ pub fn def_ty(db: &dyn HirDatabase, def: TyDefId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.ty(def))
+ }
+
+ pub fn impl_self_ty(db: &dyn HirDatabase, def: hir_def::ImplId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.impl_self_ty(def))
+ }
+
+ pub fn value_ty(db: &dyn HirDatabase, def: ValueTyDefId) -> TyBuilder<Binders<Ty>> {
+ TyBuilder::subst_binders(db.value_ty(def))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
new file mode 100644
index 000000000..faec99c7d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -0,0 +1,799 @@
+//! The implementation of `RustIrDatabase` for Chalk, which provides information
+//! about the code that Chalk needs.
+use std::sync::Arc;
+
+use cov_mark::hit;
+use syntax::SmolStr;
+use tracing::debug;
+
+use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
+use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
+
+use base_db::CrateId;
+use hir_def::{
+ lang_item::{lang_attr, LangItemTarget},
+ AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId,
+};
+use hir_expand::name::name;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, make_binders,
+ make_single_type_binders,
+ mapping::{from_chalk, ToChalk, TypeAliasAsValue},
+ method_resolution::{TraitImpls, TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
+ to_assoc_type_id, to_chalk_trait_id,
+ traits::ChalkContext,
+ utils::generics,
+ AliasEq, AliasTy, BoundVar, CallableDefId, DebruijnIndex, FnDefId, Interner, ProjectionTy,
+ ProjectionTyExt, QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder,
+ TyExt, TyKind, WhereClause,
+};
+
+pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
+pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
+pub(crate) type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
+pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
+pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
+
+pub(crate) type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub(crate) type TraitId = chalk_ir::TraitId<Interner>;
+pub(crate) type AdtId = chalk_ir::AdtId<Interner>;
+pub(crate) type ImplId = chalk_ir::ImplId<Interner>;
+pub(crate) type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
+pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
+pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
+pub(crate) type Variances = chalk_ir::Variances<Interner>;
+
+impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
+ fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
+ self.db.associated_ty_data(id)
+ }
+ fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
+ self.db.trait_datum(self.krate, trait_id)
+ }
+ fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
+ self.db.struct_datum(self.krate, struct_id)
+ }
+ fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> {
+ // FIXME: keep track of these
+ Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
+ }
+ fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: keep track of this
+ chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner)
+ }
+ fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
+ self.db.impl_datum(self.krate, impl_id)
+ }
+
+ fn fn_def_datum(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ ) -> Arc<rust_ir::FnDefDatum<Interner>> {
+ self.db.fn_def_datum(self.krate, fn_def_id)
+ }
+
+ fn impls_for_trait(
+ &self,
+ trait_id: TraitId,
+ parameters: &[chalk_ir::GenericArg<Interner>],
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Vec<ImplId> {
+ debug!("impls_for_trait {:?}", trait_id);
+ let trait_: hir_def::TraitId = from_chalk_trait_id(trait_id);
+
+ let ty: Ty = parameters[0].assert_ty_ref(Interner).clone();
+
+ fn binder_kind(
+ ty: &Ty,
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Option<chalk_ir::TyVariableKind> {
+ if let TyKind::BoundVar(bv) = ty.kind(Interner) {
+ let binders = binders.as_slice(Interner);
+ if bv.debruijn == DebruijnIndex::INNERMOST {
+ if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind {
+ return Some(tk);
+ }
+ }
+ }
+ None
+ }
+
+ let self_ty_fp = TyFingerprint::for_trait_impl(&ty);
+ let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
+ Some(chalk_ir::TyVariableKind::Integer) => &ALL_INT_FPS,
+ Some(chalk_ir::TyVariableKind::Float) => &ALL_FLOAT_FPS,
+ _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
+ };
+
+ fn local_impls(db: &dyn HirDatabase, module: ModuleId) -> Option<Arc<TraitImpls>> {
+ let block = module.containing_block()?;
+ hit!(block_local_impls);
+ db.trait_impls_in_block(block)
+ }
+
+ // Note: Since we're using impls_for_trait, only impls where the trait
+ // can be resolved should ever reach Chalk. impl_datum relies on that
+ // and will panic if the trait can't be resolved.
+ let in_deps = self.db.trait_impls_in_deps(self.krate);
+ let in_self = self.db.trait_impls_in_crate(self.krate);
+ let trait_module = trait_.module(self.db.upcast());
+ let type_module = match self_ty_fp {
+ Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
+ Some(TyFingerprint::ForeignType(type_id)) => {
+ Some(from_foreign_def_id(type_id).module(self.db.upcast()))
+ }
+ Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
+ _ => None,
+ };
+ let impl_maps = [
+ Some(in_deps),
+ Some(in_self),
+ local_impls(self.db, trait_module),
+ type_module.and_then(|m| local_impls(self.db, m)),
+ ];
+
+ let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
+
+ let result: Vec<_> = if fps.is_empty() {
+ debug!("Unrestricted search for {:?} impls...", trait_);
+ impl_maps
+ .iter()
+ .filter_map(|o| o.as_ref())
+ .flat_map(|impls| impls.for_trait(trait_).map(id_to_chalk))
+ .collect()
+ } else {
+ impl_maps
+ .iter()
+ .filter_map(|o| o.as_ref())
+ .flat_map(|impls| {
+ fps.iter().flat_map(move |fp| {
+ impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
+ })
+ })
+ .collect()
+ };
+
+ debug!("impls_for_trait returned {} impls", result.len());
+ result
+ }
+ fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind<Interner>) -> bool {
+ debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind);
+ false // FIXME
+ }
+ fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
+ self.db.associated_ty_value(self.krate, id)
+ }
+
+ fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
+ vec![]
+ }
+ fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
+ // We don't do coherence checking (yet)
+ unimplemented!()
+ }
+ fn interner(&self) -> Interner {
+ Interner
+ }
+ fn well_known_trait_id(
+ &self,
+ well_known_trait: rust_ir::WellKnownTrait,
+ ) -> Option<chalk_ir::TraitId<Interner>> {
+ let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
+ let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return None,
+ };
+ Some(to_chalk_trait_id(trait_))
+ }
+
+ fn program_clauses_for_env(
+ &self,
+ environment: &chalk_ir::Environment<Interner>,
+ ) -> chalk_ir::ProgramClauses<Interner> {
+ self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
+ }
+
+ fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
+ let full_id = self.db.lookup_intern_impl_trait_id(id.into());
+ let bound = match full_id {
+ crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas = self
+ .db
+ .return_type_impl_traits(func)
+ .expect("impl trait id without impl traits");
+ let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders();
+ let data = &datas.impl_traits[idx as usize];
+ let bound = OpaqueTyDatumBound {
+ bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ chalk_ir::Binders::new(binders, bound)
+ }
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ if let Some((future_trait, future_output)) = self
+ .db
+ .lang_item(self.krate, SmolStr::new_inline("future_trait"))
+ .and_then(|item| item.as_trait())
+ .and_then(|trait_| {
+ let alias =
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])?;
+ Some((trait_, alias))
+ })
+ {
+ // Making up Symbol’s value as variable is void: AsyncBlock<T>:
+ //
+ // |--------------------OpaqueTyDatum-------------------|
+ // |-------------OpaqueTyDatumBound--------------|
+ // for<T> <Self> [Future<Self>, Future::Output<Self> = T]
+ // ^1 ^0 ^0 ^0 ^1
+ let impl_bound = WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(future_trait),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar {
+ debruijn: DebruijnIndex::INNERMOST,
+ index: 0,
+ })
+ .intern(Interner),
+ ),
+ });
+ let mut binder = vec![];
+ binder.push(crate::wrap_empty_binders(impl_bound));
+ let sized_trait = self
+ .db
+ .lang_item(self.krate, SmolStr::new_inline("sized"))
+ .and_then(|item| item.as_trait());
+ if let Some(sized_trait_) = sized_trait {
+ let sized_bound = WhereClause::Implemented(TraitRef {
+ trait_id: to_chalk_trait_id(sized_trait_),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar {
+ debruijn: DebruijnIndex::INNERMOST,
+ index: 0,
+ })
+ .intern(Interner),
+ ),
+ });
+ binder.push(crate::wrap_empty_binders(sized_bound));
+ }
+ let proj_bound = WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(future_output),
+ // Self type as the first parameter.
+ substitution: Substitution::from1(
+ Interner,
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ ),
+ }),
+ // The parameter of the opaque type.
+ ty: TyKind::BoundVar(BoundVar { debruijn: DebruijnIndex::ONE, index: 0 })
+ .intern(Interner),
+ });
+ binder.push(crate::wrap_empty_binders(proj_bound));
+ let bound = OpaqueTyDatumBound {
+ bounds: make_single_type_binders(binder),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ // The opaque type has 1 parameter.
+ make_single_type_binders(bound)
+ } else {
+ // If failed to find Symbol’s value as variable is void: Future::Output, return empty bounds as fallback.
+ let bound = OpaqueTyDatumBound {
+ bounds: chalk_ir::Binders::empty(Interner, vec![]),
+ where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
+ };
+ // The opaque type has 1 parameter.
+ make_single_type_binders(bound)
+ }
+ }
+ };
+
+ Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound })
+ }
+
+ fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: actually provide the hidden type; it is relevant for auto traits
+ TyKind::Error.intern(Interner)
+ }
+
+ fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
+ // FIXME: implement actual object safety
+ true
+ }
+
+ fn closure_kind(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> rust_ir::ClosureKind {
+ // Fn is the closure kind that implements all three traits
+ rust_ir::ClosureKind::Fn
+ }
+ fn closure_inputs_and_output(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
+ let sig_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ let sig = &sig_ty.callable_sig(self.db).expect("first closure param should be fn ptr");
+ let io = rust_ir::FnDefInputsAndOutputDatum {
+ argument_types: sig.params().to_vec(),
+ return_type: sig.ret().clone(),
+ };
+ chalk_ir::Binders::empty(Interner, io.shifted_in(Interner))
+ }
+ fn closure_upvars(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<chalk_ir::Ty<Interner>> {
+ let ty = TyBuilder::unit();
+ chalk_ir::Binders::empty(Interner, ty)
+ }
+ fn closure_fn_substitution(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Substitution<Interner> {
+ Substitution::empty(Interner)
+ }
+
+ fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
+ let id = from_chalk_trait_id(trait_id);
+ self.db.trait_data(id).name.to_string()
+ }
+ fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
+ match adt_id {
+ hir_def::AdtId::StructId(id) => self.db.struct_data(id).name.to_string(),
+ hir_def::AdtId::EnumId(id) => self.db.enum_data(id).name.to_string(),
+ hir_def::AdtId::UnionId(id) => self.db.union_data(id).name.to_string(),
+ }
+ }
+ fn adt_size_align(&self, _id: chalk_ir::AdtId<Interner>) -> Arc<rust_ir::AdtSizeAlign> {
+ // FIXME
+ Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false))
+ }
+ fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
+ let id = self.db.associated_ty_data(assoc_ty_id).name;
+ self.db.type_alias_data(id).name.to_string()
+ }
+ fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
+ format!("Opaque_{}", opaque_ty_id.0)
+ }
+ fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
+ format!("fn_{}", fn_def_id.0)
+ }
+ fn generator_datum(
+ &self,
+ _: chalk_ir::GeneratorId<Interner>,
+ ) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorDatum<Interner>> {
+ // FIXME
+ unimplemented!()
+ }
    fn generator_witness_datum(
        &self,
        _: chalk_ir::GeneratorId<Interner>,
    ) -> std::sync::Arc<chalk_solve::rust_ir::GeneratorWitnessDatum<Interner>> {
        // FIXME: generators are not supported yet; this panics if Chalk ever asks.
        unimplemented!()
    }
+
    fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase<Interner> {
        // `&dyn HirDatabase` itself implements `UnificationDatabase`, so a reference
        // to the db field serves directly.
        &self.db
    }
+}
+
impl<'a> chalk_ir::UnificationDatabase<Interner> for &'a dyn HirDatabase {
    fn fn_def_variance(
        &self,
        fn_def_id: chalk_ir::FnDefId<Interner>,
    ) -> chalk_ir::Variances<Interner> {
        // Fully-qualified call is required: `(*self).fn_def_variance(..)` would resolve
        // back to this very trait method and recurse forever.
        HirDatabase::fn_def_variance(*self, fn_def_id)
    }

    fn adt_variance(&self, adt_id: chalk_ir::AdtId<Interner>) -> chalk_ir::Variances<Interner> {
        // Same fully-qualified dispatch as `fn_def_variance` above, for the same reason.
        HirDatabase::adt_variance(*self, adt_id)
    }
}
+
+pub(crate) fn program_clauses_for_chalk_env_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ environment: chalk_ir::Environment<Interner>,
+) -> chalk_ir::ProgramClauses<Interner> {
+ chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
+}
+
/// Builds the Chalk `AssociatedTyDatum` for an associated type: its parent trait,
/// its inline bounds (e.g. `type Item: Clone`), and an implicit `Sized` bound
/// unless the alias was marked `?Sized` during lowering.
pub(crate) fn associated_ty_data_query(
    db: &dyn HirDatabase,
    id: AssocTypeId,
) -> Arc<AssociatedTyDatum> {
    debug!("associated_ty_data {:?}", id);
    let type_alias: TypeAliasId = from_assoc_type_id(id);
    let trait_ = match type_alias.lookup(db.upcast()).container {
        ItemContainerId::TraitId(t) => t,
        // Only trait-contained aliases have assoc-type datums; anything else is a bug.
        _ => panic!("associated type not in trait"),
    };

    // Lower bounds -- we could/should maybe move this to a separate query in `lower`
    let type_alias_data = db.type_alias_data(type_alias);
    let generic_params = generics(db.upcast(), type_alias.into());
    // let bound_vars = generic_params.bound_vars_subst(DebruijnIndex::INNERMOST);
    let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
    let ctx = crate::TyLoweringContext::new(db, &resolver)
        .with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
    // The "self type" of the bounds is the projection `<Self as Trait>::Alias`,
    // filled with bound variables at the innermost binder level.
    let pro_ty = TyBuilder::assoc_type_projection(db, type_alias)
        .fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
        .build();
    let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
    // Keep only those lowered predicates that can be expressed as Chalk inline bounds.
    let mut bounds: Vec<_> = type_alias_data
        .bounds
        .iter()
        .flat_map(|bound| ctx.lower_type_bound(bound, self_ty.clone(), false))
        .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
        .collect();

    // Add the implicit `Sized` bound unless lowering recorded this type as `?Sized`.
    if !ctx.unsized_types.borrow().contains(&self_ty) {
        let sized_trait = db
            .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
            .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
        let sized_bound = sized_trait.into_iter().map(|sized_trait| {
            let trait_bound =
                rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() };
            let inline_bound = rust_ir::InlineBound::TraitBound(trait_bound);
            chalk_ir::Binders::empty(Interner, inline_bound)
        });
        bounds.extend(sized_bound);
        bounds.shrink_to_fit();
    }

    // FIXME: Re-enable where clauses on associated types when an upstream chalk bug is fixed.
    // (rust-analyzer#9052)
    // let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
    let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] };
    let datum = AssociatedTyDatum {
        trait_id: to_chalk_trait_id(trait_),
        id,
        name: type_alias,
        binders: make_binders(db, &generic_params, bound_data),
    };
    Arc::new(datum)
}
+
+pub(crate) fn trait_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ trait_id: TraitId,
+) -> Arc<TraitDatum> {
+ debug!("trait_datum {:?}", trait_id);
+ let trait_ = from_chalk_trait_id(trait_id);
+ let trait_data = db.trait_data(trait_);
+ debug!("trait {:?} = {:?}", trait_id, trait_data.name);
+ let generic_params = generics(db.upcast(), trait_.into());
+ let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let flags = rust_ir::TraitFlags {
+ auto: trait_data.is_auto,
+ upstream: trait_.lookup(db.upcast()).container.krate() != krate,
+ non_enumerable: true,
+ coinductive: false, // only relevant for Chalk testing
+ // FIXME: set these flags correctly
+ marker: false,
+ fundamental: false,
+ };
+ let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
+ let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
+ let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
+ let well_known =
+ lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
+ let trait_datum = TraitDatum {
+ id: trait_id,
+ binders: make_binders(db, &generic_params, trait_datum_bound),
+ flags,
+ associated_ty_ids,
+ well_known,
+ };
+ Arc::new(trait_datum)
+}
+
+fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
+ Some(match name {
+ "clone" => WellKnownTrait::Clone,
+ "coerce_unsized" => WellKnownTrait::CoerceUnsized,
+ "copy" => WellKnownTrait::Copy,
+ "discriminant_kind" => WellKnownTrait::DiscriminantKind,
+ "dispatch_from_dyn" => WellKnownTrait::DispatchFromDyn,
+ "drop" => WellKnownTrait::Drop,
+ "fn" => WellKnownTrait::Fn,
+ "fn_mut" => WellKnownTrait::FnMut,
+ "fn_once" => WellKnownTrait::FnOnce,
+ "generator" => WellKnownTrait::Generator,
+ "sized" => WellKnownTrait::Sized,
+ "unpin" => WellKnownTrait::Unpin,
+ "unsize" => WellKnownTrait::Unsize,
+ _ => return None,
+ })
+}
+
/// Maps a [`WellKnownTrait`] back to its `#[lang = "..."]` attribute name.
/// Must stay in sync with `well_known_trait_from_lang_attr` (its inverse).
fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
    match attr {
        WellKnownTrait::Clone => "clone",
        WellKnownTrait::CoerceUnsized => "coerce_unsized",
        WellKnownTrait::Copy => "copy",
        WellKnownTrait::DiscriminantKind => "discriminant_kind",
        WellKnownTrait::DispatchFromDyn => "dispatch_from_dyn",
        WellKnownTrait::Drop => "drop",
        WellKnownTrait::Fn => "fn",
        WellKnownTrait::FnMut => "fn_mut",
        WellKnownTrait::FnOnce => "fn_once",
        WellKnownTrait::Generator => "generator",
        WellKnownTrait::Sized => "sized",
        WellKnownTrait::Unpin => "unpin",
        WellKnownTrait::Unsize => "unsize",
    }
}
+
+pub(crate) fn struct_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ struct_id: AdtId,
+) -> Arc<StructDatum> {
+ debug!("struct_datum {:?}", struct_id);
+ let chalk_ir::AdtId(adt_id) = struct_id;
+ let generic_params = generics(db.upcast(), adt_id.into());
+ let upstream = adt_id.module(db.upcast()).krate() != krate;
+ let where_clauses = {
+ let generic_params = generics(db.upcast(), adt_id.into());
+ let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ convert_where_clauses(db, adt_id.into(), &bound_vars)
+ };
+ let flags = rust_ir::AdtFlags {
+ upstream,
+ // FIXME set fundamental and phantom_data flags correctly
+ fundamental: false,
+ phantom_data: false,
+ };
+ // FIXME provide enum variants properly (for auto traits)
+ let variant = rust_ir::AdtVariantDatum {
+ fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
+ };
+ let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
+ let struct_datum = StructDatum {
+ // FIXME set ADT kind
+ kind: rust_ir::AdtKind::Struct,
+ id: struct_id,
+ binders: make_binders(db, &generic_params, struct_datum_bound),
+ flags,
+ };
+ Arc::new(struct_datum)
+}
+
+pub(crate) fn impl_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ impl_id: ImplId,
+) -> Arc<ImplDatum> {
+ let _p = profile::span("impl_datum");
+ debug!("impl_datum {:?}", impl_id);
+ let impl_: hir_def::ImplId = from_chalk(db, impl_id);
+ impl_def_datum(db, krate, impl_id, impl_)
+}
+
/// Builds the Chalk `ImplDatum` for a trait impl: its trait ref, polarity,
/// locality, where clauses, and the ids of its associated type values.
///
/// Panics if the impl's trait ref cannot be resolved — such impls must be
/// filtered out before their ids are handed to Chalk.
fn impl_def_datum(
    db: &dyn HirDatabase,
    krate: CrateId,
    chalk_id: ImplId,
    impl_id: hir_def::ImplId,
) -> Arc<ImplDatum> {
    let trait_ref = db
        .impl_trait(impl_id)
        // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
        .expect("invalid impl passed to Chalk")
        .into_value_and_skipped_binders()
        .0;
    let impl_data = db.impl_data(impl_id);

    let generic_params = generics(db.upcast(), impl_id.into());
    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    let trait_ = trait_ref.hir_trait_id();
    // Local vs. external decides which coherence rules Chalk applies to this impl.
    let impl_type = if impl_id.lookup(db.upcast()).container.krate() == krate {
        rust_ir::ImplType::Local
    } else {
        rust_ir::ImplType::External
    };
    let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
    let negative = impl_data.is_negative;
    debug!(
        "impl {:?}: {}{} where {:?}",
        chalk_id,
        if negative { "!" } else { "" },
        trait_ref.display(db),
        where_clauses
    );

    let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };

    let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
    let trait_data = db.trait_data(trait_);
    // Collect the impl's type aliases that actually correspond to associated
    // types declared on the trait; stray aliases are silently skipped.
    let associated_ty_value_ids = impl_data
        .items
        .iter()
        .filter_map(|item| match item {
            AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
            _ => None,
        })
        .filter(|&type_alias| {
            // don't include associated types that don't exist in the trait
            let name = &db.type_alias_data(type_alias).name;
            trait_data.associated_type_by_name(name).is_some()
        })
        .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
        .collect();
    debug!("impl_datum: {:?}", impl_datum_bound);
    let impl_datum = ImplDatum {
        binders: make_binders(db, &generic_params, impl_datum_bound),
        impl_type,
        polarity,
        associated_ty_value_ids,
    };
    Arc::new(impl_datum)
}
+
+pub(crate) fn associated_ty_value_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ id: AssociatedTyValueId,
+) -> Arc<AssociatedTyValue> {
+ let type_alias: TypeAliasAsValue = from_chalk(db, id);
+ type_alias_associated_ty_value(db, krate, type_alias.0)
+}
+
/// Builds the Chalk `AssociatedTyValue` for a `type Foo = ...` item inside an
/// impl, linking it to the associated type it implements on the trait.
///
/// The two `expect`s rely on invariants upheld when the impl data is built:
/// values are only requested for impls with a resolvable trait ref, and only
/// for aliases that exist on that trait (see `impl_def_datum`'s filtering).
fn type_alias_associated_ty_value(
    db: &dyn HirDatabase,
    _krate: CrateId,
    type_alias: TypeAliasId,
) -> Arc<AssociatedTyValue> {
    let type_alias_data = db.type_alias_data(type_alias);
    let impl_id = match type_alias.lookup(db.upcast()).container {
        ItemContainerId::ImplId(it) => it,
        _ => panic!("assoc ty value should be in impl"),
    };

    let trait_ref = db
        .impl_trait(impl_id)
        .expect("assoc ty value should not exist")
        .into_value_and_skipped_binders()
        .0; // we don't return any assoc ty values if the impl'd trait can't be resolved

    let assoc_ty = db
        .trait_data(trait_ref.hir_trait_id())
        .associated_type_by_name(&type_alias_data.name)
        .expect("assoc ty value should not exist"); // validated when building the impl data as well
    let (ty, binders) = db.ty(type_alias.into()).into_value_and_skipped_binders();
    let value_bound = rust_ir::AssociatedTyValueBound { ty };
    let value = rust_ir::AssociatedTyValue {
        impl_id: impl_id.to_chalk(db),
        associated_ty_id: to_assoc_type_id(assoc_ty),
        value: chalk_ir::Binders::new(binders, value_bound),
    };
    Arc::new(value)
}
+
/// Builds the Chalk `FnDefDatum` for a callable (fn, tuple struct, or enum
/// variant constructor): its signature, where clauses, and inputs/output.
pub(crate) fn fn_def_datum_query(
    db: &dyn HirDatabase,
    _krate: CrateId,
    fn_def_id: FnDefId,
) -> Arc<FnDefDatum> {
    let callable_def: CallableDefId = from_chalk(db, fn_def_id);
    let generic_params = generics(db.upcast(), callable_def.into());
    let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
    let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
    let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
    let bound = rust_ir::FnDefDatumBound {
        // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
        // The empty inner binder level is still introduced, so the types are
        // shifted in by one to keep their DeBruijn indices pointing at the
        // outer (generic-params) binder.
        inputs_and_output: chalk_ir::Binders::empty(
            Interner,
            rust_ir::FnDefInputsAndOutputDatum {
                argument_types: sig.params().to_vec(),
                return_type: sig.ret().clone(),
            }
            .shifted_in(Interner),
        ),
        where_clauses,
    };
    let datum = FnDefDatum {
        id: fn_def_id,
        sig: chalk_ir::FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: sig.is_varargs },
        binders: chalk_ir::Binders::new(binders, bound),
    };
    Arc::new(datum)
}
+
+pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
+ let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+ let generic_params = generics(db.upcast(), callable_def.into());
+ Variances::from_iter(
+ Interner,
+ std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+ )
+}
+
+pub(crate) fn adt_variance_query(
+ db: &dyn HirDatabase,
+ chalk_ir::AdtId(adt_id): AdtId,
+) -> Variances {
+ let generic_params = generics(db.upcast(), adt_id.into());
+ Variances::from_iter(
+ Interner,
+ std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+ )
+}
+
+pub(super) fn convert_where_clauses(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ substs: &Substitution,
+) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
+ let generic_predicates = db.generic_predicates(def);
+ let mut result = Vec::with_capacity(generic_predicates.len());
+ for pred in generic_predicates.iter() {
+ result.push(pred.clone().substitute(Interner, substs));
+ }
+ result
+}
+
/// Converts a where-clause predicate back into a Chalk `InlineBound` (a bound
/// with the self type left implicit), if the predicate's self type matches
/// `self_ty`. Returns `None` for predicates on other types or unsupported
/// clause kinds.
pub(super) fn generic_predicate_to_inline_bound(
    db: &dyn HirDatabase,
    pred: &QuantifiedWhereClause,
    self_ty: &Ty,
) -> Option<chalk_ir::Binders<rust_ir::InlineBound<Interner>>> {
    // An InlineBound is like a GenericPredicate, except the self type is left out.
    // We don't have a special type for this, but Chalk does.
    // Shift `self_ty` in by one binder level so it can be compared against the
    // self type under the predicate's own binders.
    let self_ty_shifted_in = self_ty.clone().shifted_in_from(Interner, DebruijnIndex::ONE);
    let (pred, binders) = pred.as_ref().into_value_and_skipped_binders();
    match pred {
        WhereClause::Implemented(trait_ref) => {
            if trait_ref.self_type_parameter(Interner) != self_ty_shifted_in {
                // we can only convert predicates back to type bounds if they
                // have the expected self type
                return None;
            }
            // Drop the self type (index 0) from the substitution; Chalk keeps it implicit.
            let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
                .iter()
                .map(|ty| ty.clone().cast(Interner))
                .collect();
            let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
            Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
        }
        WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
            if projection_ty.self_type_parameter(Interner) != self_ty_shifted_in {
                return None;
            }
            let trait_ = projection_ty.trait_(db);
            let args_no_self = projection_ty.substitution.as_slice(Interner)[1..]
                .iter()
                .map(|ty| ty.clone().cast(Interner))
                .collect();
            let alias_eq_bound = rust_ir::AliasEqBound {
                value: ty.clone(),
                trait_bound: rust_ir::TraitBound {
                    trait_id: to_chalk_trait_id(trait_),
                    args_no_self,
                },
                associated_ty_id: projection_ty.associated_ty_id,
                parameters: Vec::new(), // FIXME we don't support generic associated types yet
            };
            Some(chalk_ir::Binders::new(
                binders,
                rust_ir::InlineBound::AliasEqBound(alias_eq_bound),
            ))
        }
        _ => None,
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
new file mode 100644
index 000000000..a9c124b42
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -0,0 +1,358 @@
+//! Various extensions traits for Chalk types.
+
+use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, UintTy};
+use hir_def::{
+ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
+ generics::TypeOrConstParamData,
+ type_ref::Rawness,
+ FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
+};
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
+ from_placeholder_idx, to_chalk_trait_id, AdtId, AliasEq, AliasTy, Binders, CallableDefId,
+ CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause,
+ Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+};
+
/// Extension methods for [`Ty`]: shape predicates, projections out of specific
/// `TyKind`s, and a few database-backed lookups.
pub trait TyExt {
    // Simple shape predicates.
    fn is_unit(&self) -> bool;
    fn is_never(&self) -> bool;
    fn is_unknown(&self) -> bool;
    fn is_ty_var(&self) -> bool;

    // `as_*`: cheap projections returning `None` when the kind doesn't match.
    fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
    fn as_builtin(&self) -> Option<BuiltinType>;
    fn as_tuple(&self) -> Option<&Substitution>;
    fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId>;
    fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)>;
    fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)>;
    fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId>;

    // Callable-related lookups (require database access).
    fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId>;
    fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig>;

    /// Peels all reference layers / the outermost reference layer, respectively.
    fn strip_references(&self) -> &Ty;
    fn strip_reference(&self) -> &Ty;

    /// If this is a `dyn Trait`, returns that trait.
    fn dyn_trait(&self) -> Option<TraitId>;

    fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>>;
    fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId>;

    /// FIXME: Get rid of this, it's not a good abstraction
    fn equals_ctor(&self, other: &Ty) -> bool;
}
+
impl TyExt for Ty {
    fn is_unit(&self) -> bool {
        matches!(self.kind(Interner), TyKind::Tuple(0, _))
    }

    fn is_never(&self) -> bool {
        matches!(self.kind(Interner), TyKind::Never)
    }

    fn is_unknown(&self) -> bool {
        matches!(self.kind(Interner), TyKind::Error)
    }

    fn is_ty_var(&self) -> bool {
        matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
    }

    fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> {
        match self.kind(Interner) {
            TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
            _ => None,
        }
    }

    // Maps primitive `TyKind`s back to surface-level `BuiltinType`s.
    fn as_builtin(&self) -> Option<BuiltinType> {
        match self.kind(Interner) {
            TyKind::Str => Some(BuiltinType::Str),
            TyKind::Scalar(Scalar::Bool) => Some(BuiltinType::Bool),
            TyKind::Scalar(Scalar::Char) => Some(BuiltinType::Char),
            TyKind::Scalar(Scalar::Float(fty)) => Some(BuiltinType::Float(match fty {
                FloatTy::F64 => BuiltinFloat::F64,
                FloatTy::F32 => BuiltinFloat::F32,
            })),
            TyKind::Scalar(Scalar::Int(ity)) => Some(BuiltinType::Int(match ity {
                IntTy::Isize => BuiltinInt::Isize,
                IntTy::I8 => BuiltinInt::I8,
                IntTy::I16 => BuiltinInt::I16,
                IntTy::I32 => BuiltinInt::I32,
                IntTy::I64 => BuiltinInt::I64,
                IntTy::I128 => BuiltinInt::I128,
            })),
            TyKind::Scalar(Scalar::Uint(ity)) => Some(BuiltinType::Uint(match ity {
                UintTy::Usize => BuiltinUint::Usize,
                UintTy::U8 => BuiltinUint::U8,
                UintTy::U16 => BuiltinUint::U16,
                UintTy::U32 => BuiltinUint::U32,
                UintTy::U64 => BuiltinUint::U64,
                UintTy::U128 => BuiltinUint::U128,
            })),
            _ => None,
        }
    }

    fn as_tuple(&self) -> Option<&Substitution> {
        match self.kind(Interner) {
            TyKind::Tuple(_, substs) => Some(substs),
            _ => None,
        }
    }

    // Only plain functions qualify; struct/variant constructors return `None`.
    fn as_fn_def(&self, db: &dyn HirDatabase) -> Option<FunctionId> {
        match self.callable_def(db) {
            Some(CallableDefId::FunctionId(func)) => Some(func),
            Some(CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_)) | None => None,
        }
    }
    fn as_reference(&self) -> Option<(&Ty, Lifetime, Mutability)> {
        match self.kind(Interner) {
            TyKind::Ref(mutability, lifetime, ty) => Some((ty, lifetime.clone(), *mutability)),
            _ => None,
        }
    }

    fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
        match self.kind(Interner) {
            TyKind::Ref(mutability, _, ty) => Some((ty, Rawness::Ref, *mutability)),
            TyKind::Raw(mutability, ty) => Some((ty, Rawness::RawPtr, *mutability)),
            _ => None,
        }
    }

    fn as_generic_def(&self, db: &dyn HirDatabase) -> Option<GenericDefId> {
        match *self.kind(Interner) {
            TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
            TyKind::FnDef(callable, ..) => {
                Some(db.lookup_intern_callable_def(callable.into()).into())
            }
            TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),
            TyKind::Foreign(type_alias, ..) => Some(from_foreign_def_id(type_alias).into()),
            _ => None,
        }
    }

    fn callable_def(&self, db: &dyn HirDatabase) -> Option<CallableDefId> {
        match self.kind(Interner) {
            &TyKind::FnDef(def, ..) => Some(db.lookup_intern_callable_def(def.into())),
            _ => None,
        }
    }

    fn callable_sig(&self, db: &dyn HirDatabase) -> Option<CallableSig> {
        match self.kind(Interner) {
            TyKind::Function(fn_ptr) => Some(CallableSig::from_fn_ptr(fn_ptr)),
            TyKind::FnDef(def, parameters) => {
                let callable_def = db.lookup_intern_callable_def((*def).into());
                let sig = db.callable_item_signature(callable_def);
                Some(sig.substitute(Interner, &parameters))
            }
            TyKind::Closure(.., substs) => {
                // The closure's signature type is stored as the first entry of
                // its substitution; recurse into it.
                let sig_param = substs.at(Interner, 0).assert_ty_ref(Interner);
                sig_param.callable_sig(db)
            }
            _ => None,
        }
    }

    fn dyn_trait(&self) -> Option<TraitId> {
        // Takes the first bound of the dyn type, and only if it is a plain
        // `Implemented(..)` bound (the principal trait position).
        let trait_ref = match self.kind(Interner) {
            TyKind::Dyn(dyn_ty) => dyn_ty.bounds.skip_binders().interned().get(0).and_then(|b| {
                match b.skip_binders() {
                    WhereClause::Implemented(trait_ref) => Some(trait_ref),
                    _ => None,
                }
            }),
            _ => None,
        }?;
        Some(from_chalk_trait_id(trait_ref.trait_id))
    }

    fn strip_references(&self) -> &Ty {
        let mut t: &Ty = self;
        while let TyKind::Ref(_mutability, _lifetime, ty) = t.kind(Interner) {
            t = ty;
        }
        t
    }

    fn strip_reference(&self) -> &Ty {
        self.as_reference().map_or(self, |(ty, _, _)| ty)
    }

    // Collects the bounds behind an `impl Trait`, handling the three ways it
    // can appear: opaque async-block types, opaque/alias RPIT types, and
    // argument-position impl-trait lowered to a placeholder parameter.
    fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<QuantifiedWhereClause>> {
        match self.kind(Interner) {
            TyKind::OpaqueType(opaque_ty_id, subst) => {
                match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
                    ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
                        let krate = def.module(db.upcast()).krate();
                        if let Some(future_trait) = db
                            .lang_item(krate, SmolStr::new_inline("future_trait"))
                            .and_then(|item| item.as_trait())
                        {
                            // This is only used by type walking.
                            // Parameters will be walked outside, and projection predicate is not used.
                            // So just provide the Future trait.
                            let impl_bound = Binders::empty(
                                Interner,
                                WhereClause::Implemented(TraitRef {
                                    trait_id: to_chalk_trait_id(future_trait),
                                    substitution: Substitution::empty(Interner),
                                }),
                            );
                            Some(vec![impl_bound])
                        } else {
                            None
                        }
                    }
                    ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                        db.return_type_impl_traits(func).map(|it| {
                            let data = (*it)
                                .as_ref()
                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
                            data.substitute(Interner, &subst).into_value_and_skipped_binders().0
                        })
                    }
                }
            }
            TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
                let predicates = match db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into())
                {
                    ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                        db.return_type_impl_traits(func).map(|it| {
                            let data = (*it)
                                .as_ref()
                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
                            data.substitute(Interner, &opaque_ty.substitution)
                        })
                    }
                    // It always has an parameter for Future::Output type.
                    ImplTraitId::AsyncBlockTypeImplTrait(..) => unreachable!(),
                };

                predicates.map(|it| it.into_value_and_skipped_binders().0)
            }
            TyKind::Placeholder(idx) => {
                let id = from_placeholder_idx(db, *idx);
                let generic_params = db.generic_params(id.parent);
                let param_data = &generic_params.type_or_consts[id.local_id];
                match param_data {
                    TypeOrConstParamData::TypeParamData(p) => match p.provenance {
                        hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
                            // Keep only the parent's predicates whose self type
                            // is this placeholder.
                            let substs = TyBuilder::placeholder_subst(db, id.parent);
                            let predicates = db
                                .generic_predicates(id.parent)
                                .iter()
                                .map(|pred| pred.clone().substitute(Interner, &substs))
                                .filter(|wc| match &wc.skip_binders() {
                                    WhereClause::Implemented(tr) => {
                                        &tr.self_type_parameter(Interner) == self
                                    }
                                    WhereClause::AliasEq(AliasEq {
                                        alias: AliasTy::Projection(proj),
                                        ty: _,
                                    }) => &proj.self_type_parameter(Interner) == self,
                                    _ => false,
                                })
                                .collect::<Vec<_>>();

                            Some(predicates)
                        }
                        _ => None,
                    },
                    _ => None,
                }
            }
            _ => None,
        }
    }

    fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
        match self.kind(Interner) {
            TyKind::AssociatedType(id, ..) => {
                match from_assoc_type_id(*id).lookup(db.upcast()).container {
                    ItemContainerId::TraitId(trait_id) => Some(trait_id),
                    _ => None,
                }
            }
            TyKind::Alias(AliasTy::Projection(projection_ty)) => {
                match from_assoc_type_id(projection_ty.associated_ty_id)
                    .lookup(db.upcast())
                    .container
                {
                    ItemContainerId::TraitId(trait_id) => Some(trait_id),
                    _ => None,
                }
            }
            _ => None,
        }
    }

    // Compares only the head type constructor; generic arguments are ignored.
    fn equals_ctor(&self, other: &Ty) -> bool {
        match (self.kind(Interner), other.kind(Interner)) {
            (TyKind::Adt(adt, ..), TyKind::Adt(adt2, ..)) => adt == adt2,
            (TyKind::Slice(_), TyKind::Slice(_)) | (TyKind::Array(_, _), TyKind::Array(_, _)) => {
                true
            }
            (TyKind::FnDef(def_id, ..), TyKind::FnDef(def_id2, ..)) => def_id == def_id2,
            (TyKind::OpaqueType(ty_id, ..), TyKind::OpaqueType(ty_id2, ..)) => ty_id == ty_id2,
            (TyKind::AssociatedType(ty_id, ..), TyKind::AssociatedType(ty_id2, ..)) => {
                ty_id == ty_id2
            }
            (TyKind::Foreign(ty_id, ..), TyKind::Foreign(ty_id2, ..)) => ty_id == ty_id2,
            (TyKind::Closure(id1, _), TyKind::Closure(id2, _)) => id1 == id2,
            (TyKind::Ref(mutability, ..), TyKind::Ref(mutability2, ..))
            | (TyKind::Raw(mutability, ..), TyKind::Raw(mutability2, ..)) => {
                mutability == mutability2
            }
            (
                TyKind::Function(FnPointer { num_binders, sig, .. }),
                TyKind::Function(FnPointer { num_binders: num_binders2, sig: sig2, .. }),
            ) => num_binders == num_binders2 && sig == sig2,
            (TyKind::Tuple(cardinality, _), TyKind::Tuple(cardinality2, _)) => {
                cardinality == cardinality2
            }
            (TyKind::Str, TyKind::Str) | (TyKind::Never, TyKind::Never) => true,
            (TyKind::Scalar(scalar), TyKind::Scalar(scalar2)) => scalar == scalar2,
            _ => false,
        }
    }
}
+
/// Extension methods for [`ProjectionTy`].
pub trait ProjectionTyExt {
    /// The trait reference this projection's substitution applies to.
    fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef;
    /// The trait that declares the projected associated type.
    fn trait_(&self, db: &dyn HirDatabase) -> TraitId;
}
+
+impl ProjectionTyExt for ProjectionTy {
+ fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
+ TraitRef {
+ trait_id: to_chalk_trait_id(self.trait_(db)),
+ substitution: self.substitution.clone(),
+ }
+ }
+
+ fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
+ match from_assoc_type_id(self.associated_ty_id).lookup(db.upcast()).container {
+ ItemContainerId::TraitId(it) => it,
+ _ => panic!("projection ty without parent trait"),
+ }
+ }
+}
+
/// Extension methods for [`TraitRef`].
pub trait TraitRefExt {
    /// The HIR-level id of the referenced trait.
    fn hir_trait_id(&self) -> TraitId;
}
+
impl TraitRefExt for TraitRef {
    fn hir_trait_id(&self) -> TraitId {
        // Chalk and HIR use distinct trait id spaces; convert back to the HIR one.
        from_chalk_trait_id(self.trait_id)
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
new file mode 100644
index 000000000..0495a4e64
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -0,0 +1,469 @@
+//! Constant evaluation details
+
+use std::{
+ collections::HashMap,
+ convert::TryInto,
+ fmt::{Display, Write},
+};
+
+use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
+use hir_def::{
+ expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
+ path::ModPath,
+ resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
+ type_ref::ConstScalar,
+ ConstId, DefWithBodyId,
+};
+use la_arena::{Arena, Idx};
+use stdx::never;
+
+use crate::{
+ db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
+ utils::Generics, Const, ConstData, ConstValue, GenericArg, InferenceResult, Interner, Ty,
+ TyBuilder, TyKind,
+};
+
+/// Extension trait for [`Const`]
+pub trait ConstExt {
+ /// Is a [`Const`] unknown?
+ fn is_unknown(&self) -> bool;
+}
+
+impl ConstExt for Const {
+ fn is_unknown(&self) -> bool {
+ match self.data(Interner).value {
+ // interned Unknown
+ chalk_ir::ConstValue::Concrete(chalk_ir::ConcreteConst {
+ interned: ConstScalar::Unknown,
+ }) => true,
+
+            // any other interned concrete value is a known constant
+ chalk_ir::ConstValue::Concrete(..) => false,
+
+ _ => {
+ tracing::error!(
+ "is_unknown was called on a non-concrete constant value! {:?}",
+ self
+ );
+ true
+ }
+ }
+ }
+}
+
+pub struct ConstEvalCtx<'a> {
+ pub db: &'a dyn HirDatabase,
+ pub owner: DefWithBodyId,
+ pub exprs: &'a Arena<Expr>,
+ pub pats: &'a Arena<Pat>,
+ pub local_data: HashMap<PatId, ComputedExpr>,
+ infer: &'a InferenceResult,
+}
+
+impl ConstEvalCtx<'_> {
+ fn expr_ty(&mut self, expr: ExprId) -> Ty {
+ self.infer[expr].clone()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ConstEvalError {
+ NotSupported(&'static str),
+ SemanticError(&'static str),
+ Loop,
+ IncompleteExpr,
+ Panic(String),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ComputedExpr {
+ Literal(Literal),
+ Tuple(Box<[ComputedExpr]>),
+}
+
+impl Display for ComputedExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ ComputedExpr::Literal(l) => match l {
+ Literal::Int(x, _) => {
+ if *x >= 10 {
+ write!(f, "{} ({:#X})", x, x)
+ } else {
+ x.fmt(f)
+ }
+ }
+ Literal::Uint(x, _) => {
+ if *x >= 10 {
+ write!(f, "{} ({:#X})", x, x)
+ } else {
+ x.fmt(f)
+ }
+ }
+ Literal::Float(x, _) => x.fmt(f),
+ Literal::Bool(x) => x.fmt(f),
+ Literal::Char(x) => std::fmt::Debug::fmt(x, f),
+ Literal::String(x) => std::fmt::Debug::fmt(x, f),
+ Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
+ },
+ ComputedExpr::Tuple(t) => {
+ f.write_char('(')?;
+ for x in &**t {
+ x.fmt(f)?;
+ f.write_str(", ")?;
+ }
+ f.write_char(')')
+ }
+ }
+ }
+}
+
+fn scalar_max(scalar: &Scalar) -> i128 {
+ match scalar {
+ Scalar::Bool => 1,
+ Scalar::Char => u32::MAX as i128,
+ Scalar::Int(x) => match x {
+ IntTy::Isize => isize::MAX as i128,
+ IntTy::I8 => i8::MAX as i128,
+ IntTy::I16 => i16::MAX as i128,
+ IntTy::I32 => i32::MAX as i128,
+ IntTy::I64 => i64::MAX as i128,
+ IntTy::I128 => i128::MAX as i128,
+ },
+ Scalar::Uint(x) => match x {
+ chalk_ir::UintTy::Usize => usize::MAX as i128,
+ chalk_ir::UintTy::U8 => u8::MAX as i128,
+ chalk_ir::UintTy::U16 => u16::MAX as i128,
+ chalk_ir::UintTy::U32 => u32::MAX as i128,
+ chalk_ir::UintTy::U64 => u64::MAX as i128,
+            chalk_ir::UintTy::U128 => i128::MAX as i128, // FIXME: u128 values above i128::MAX are clamped for now
+ },
+ Scalar::Float(_) => 0,
+ }
+}
+
+fn is_valid(scalar: &Scalar, value: i128) -> bool {
+ if value < 0 {
+ !matches!(scalar, Scalar::Uint(_)) && -scalar_max(scalar) - 1 <= value
+ } else {
+ value <= scalar_max(scalar)
+ }
+}
+
+pub fn eval_const(
+ expr_id: ExprId,
+ ctx: &mut ConstEvalCtx<'_>,
+) -> Result<ComputedExpr, ConstEvalError> {
+ let expr = &ctx.exprs[expr_id];
+ match expr {
+ Expr::Missing => Err(ConstEvalError::IncompleteExpr),
+ Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
+ &Expr::UnaryOp { expr, op } => {
+ let ty = &ctx.expr_ty(expr);
+ let ev = eval_const(expr, ctx)?;
+ match op {
+ hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
+ hir_def::expr::UnaryOp::Not => {
+ let v = match ev {
+ ComputedExpr::Literal(Literal::Bool(b)) => {
+ return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
+ }
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => v
+ .try_into()
+ .map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ let r = match ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Uint(x)) => match x {
+ chalk_ir::UintTy::U8 => !(v as u8) as i128,
+ chalk_ir::UintTy::U16 => !(v as u16) as i128,
+ chalk_ir::UintTy::U32 => !(v as u32) as i128,
+ chalk_ir::UintTy::U64 => !(v as u64) as i128,
+ chalk_ir::UintTy::U128 => {
+ return Err(ConstEvalError::NotSupported("negation of u128"))
+ }
+ chalk_ir::UintTy::Usize => !(v as usize) as i128,
+ },
+ TyKind::Scalar(Scalar::Int(x)) => match x {
+ chalk_ir::IntTy::I8 => !(v as i8) as i128,
+ chalk_ir::IntTy::I16 => !(v as i16) as i128,
+ chalk_ir::IntTy::I32 => !(v as i32) as i128,
+ chalk_ir::IntTy::I64 => !(v as i64) as i128,
+ chalk_ir::IntTy::I128 => !v,
+ chalk_ir::IntTy::Isize => !(v as isize) as i128,
+ },
+ _ => return Err(ConstEvalError::NotSupported("unreachable?")),
+ };
+ Ok(ComputedExpr::Literal(Literal::Int(r, None)))
+ }
+ hir_def::expr::UnaryOp::Neg => {
+ let v = match ev {
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => v
+ .try_into()
+ .map_err(|_| ConstEvalError::NotSupported("too big u128"))?,
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ Ok(ComputedExpr::Literal(Literal::Int(
+ v.checked_neg().ok_or_else(|| {
+ ConstEvalError::Panic("overflow in negation".to_string())
+ })?,
+ None,
+ )))
+ }
+ }
+ }
+ &Expr::BinaryOp { lhs, rhs, op } => {
+ let ty = &ctx.expr_ty(lhs);
+ let lhs = eval_const(lhs, ctx)?;
+ let rhs = eval_const(rhs, ctx)?;
+ let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
+ let v1 = match lhs {
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => {
+ v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
+ }
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ let v2 = match rhs {
+ ComputedExpr::Literal(Literal::Int(v, _)) => v,
+ ComputedExpr::Literal(Literal::Uint(v, _)) => {
+ v.try_into().map_err(|_| ConstEvalError::NotSupported("too big u128"))?
+ }
+ _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
+ };
+ match op {
+ BinaryOp::ArithOp(b) => {
+ let panic_arith = ConstEvalError::Panic(
+ "attempt to run invalid arithmetic operation".to_string(),
+ );
+ let r = match b {
+ ArithOp::Add => v1.checked_add(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Mul => v1.checked_mul(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Sub => v1.checked_sub(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Div => v1.checked_div(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Rem => v1.checked_rem(v2).ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Shl => v1
+ .checked_shl(v2.try_into().map_err(|_| panic_arith.clone())?)
+ .ok_or_else(|| panic_arith.clone())?,
+ ArithOp::Shr => v1
+ .checked_shr(v2.try_into().map_err(|_| panic_arith.clone())?)
+ .ok_or_else(|| panic_arith.clone())?,
+ ArithOp::BitXor => v1 ^ v2,
+ ArithOp::BitOr => v1 | v2,
+ ArithOp::BitAnd => v1 & v2,
+ };
+ if let TyKind::Scalar(s) = ty.kind(Interner) {
+ if !is_valid(s, r) {
+ return Err(panic_arith);
+ }
+ }
+ Ok(ComputedExpr::Literal(Literal::Int(r, None)))
+ }
+ BinaryOp::LogicOp(_) => Err(ConstEvalError::SemanticError("logic op on numbers")),
+ _ => Err(ConstEvalError::NotSupported("bin op on this operators")),
+ }
+ }
+ Expr::Block { statements, tail, .. } => {
+ let mut prev_values = HashMap::<PatId, Option<ComputedExpr>>::default();
+ for statement in &**statements {
+ match *statement {
+ hir_def::expr::Statement::Let { pat: pat_id, initializer, .. } => {
+ let pat = &ctx.pats[pat_id];
+ match pat {
+ Pat::Bind { subpat, .. } if subpat.is_none() => (),
+ _ => {
+ return Err(ConstEvalError::NotSupported("complex patterns in let"))
+ }
+ };
+ let value = match initializer {
+ Some(x) => eval_const(x, ctx)?,
+ None => continue,
+ };
+ if !prev_values.contains_key(&pat_id) {
+ let prev = ctx.local_data.insert(pat_id, value);
+ prev_values.insert(pat_id, prev);
+ } else {
+ ctx.local_data.insert(pat_id, value);
+ }
+ }
+ hir_def::expr::Statement::Expr { .. } => {
+ return Err(ConstEvalError::NotSupported("this kind of statement"))
+ }
+ }
+ }
+ let r = match tail {
+ &Some(x) => eval_const(x, ctx),
+ None => Ok(ComputedExpr::Tuple(Box::new([]))),
+ };
+        // clean up local data, so the caller receives exactly the map that was passed to us
+ for (name, val) in prev_values {
+ match val {
+ Some(x) => ctx.local_data.insert(name, x),
+ None => ctx.local_data.remove(&name),
+ };
+ }
+ r
+ }
+ Expr::Path(p) => {
+ let resolver = resolver_for_expr(ctx.db.upcast(), ctx.owner, expr_id);
+ let pr = resolver
+ .resolve_path_in_value_ns(ctx.db.upcast(), p.mod_path())
+ .ok_or(ConstEvalError::SemanticError("unresolved path"))?;
+ let pr = match pr {
+ ResolveValueResult::ValueNs(v) => v,
+ ResolveValueResult::Partial(..) => {
+ return match ctx
+ .infer
+ .assoc_resolutions_for_expr(expr_id)
+ .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))?
+ {
+ hir_def::AssocItemId::FunctionId(_) => {
+ Err(ConstEvalError::NotSupported("assoc function"))
+ }
+ hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c),
+ hir_def::AssocItemId::TypeAliasId(_) => {
+ Err(ConstEvalError::NotSupported("assoc type alias"))
+ }
+ }
+ }
+ };
+ match pr {
+ ValueNs::LocalBinding(pat_id) => {
+ let r = ctx
+ .local_data
+ .get(&pat_id)
+ .ok_or(ConstEvalError::NotSupported("Unexpected missing local"))?;
+ Ok(r.clone())
+ }
+ ValueNs::ConstId(id) => ctx.db.const_eval(id),
+ ValueNs::GenericParam(_) => {
+ Err(ConstEvalError::NotSupported("const generic without substitution"))
+ }
+ _ => Err(ConstEvalError::NotSupported("path that are not const or local")),
+ }
+ }
+ _ => Err(ConstEvalError::NotSupported("This kind of expression")),
+ }
+}
+
+pub(crate) fn path_to_const(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &ModPath,
+ mode: ParamLoweringMode,
+ args_lazy: impl FnOnce() -> Generics,
+ debruijn: DebruijnIndex,
+) -> Option<Const> {
+ match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::GenericParam(p)) => {
+ let ty = db.const_param_ty(p);
+ let args = args_lazy();
+ let value = match mode {
+ ParamLoweringMode::Placeholder => {
+ ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
+ }
+ ParamLoweringMode::Variable => match args.param_idx(p.into()) {
+ Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
+ None => {
+ never!(
+ "Generic list doesn't contain this param: {:?}, {}, {:?}",
+ args,
+ path,
+ p
+ );
+ return None;
+ }
+ },
+ };
+ Some(ConstData { ty, value }.intern(Interner))
+ }
+ _ => None,
+ }
+}
+
+pub fn unknown_const(ty: Ty) -> Const {
+ ConstData {
+ ty,
+ value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: ConstScalar::Unknown }),
+ }
+ .intern(Interner)
+}
+
+pub fn unknown_const_as_generic(ty: Ty) -> GenericArg {
+ GenericArgData::Const(unknown_const(ty)).intern(Interner)
+}
+
+/// Interns a constant scalar with the given type
+pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
+ ConstData { ty, value: ConstValue::Concrete(chalk_ir::ConcreteConst { interned: value }) }
+ .intern(Interner)
+}
+
+/// Interns a possibly-unknown target usize
+pub fn usize_const(value: Option<u128>) -> Const {
+ intern_const_scalar(value.map_or(ConstScalar::Unknown, ConstScalar::UInt), TyBuilder::usize())
+}
+
+pub(crate) fn const_eval_recover(
+ _: &dyn HirDatabase,
+ _: &[String],
+ _: &ConstId,
+) -> Result<ComputedExpr, ConstEvalError> {
+ Err(ConstEvalError::Loop)
+}
+
+pub(crate) fn const_eval_query(
+ db: &dyn HirDatabase,
+ const_id: ConstId,
+) -> Result<ComputedExpr, ConstEvalError> {
+ let def = const_id.into();
+ let body = db.body(def);
+ let infer = &db.infer(def);
+ let result = eval_const(
+ body.body_expr,
+ &mut ConstEvalCtx {
+ db,
+ owner: const_id.into(),
+ exprs: &body.exprs,
+ pats: &body.pats,
+ local_data: HashMap::default(),
+ infer,
+ },
+ );
+ result
+}
+
+pub(crate) fn eval_to_const<'a>(
+ expr: Idx<Expr>,
+ mode: ParamLoweringMode,
+ ctx: &mut InferenceContext<'a>,
+ args: impl FnOnce() -> Generics,
+ debruijn: DebruijnIndex,
+) -> Const {
+ if let Expr::Path(p) = &ctx.body.exprs[expr] {
+ let db = ctx.db;
+ let resolver = &ctx.resolver;
+ if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
+ return c;
+ }
+ }
+ let body = ctx.body.clone();
+ let mut ctx = ConstEvalCtx {
+ db: ctx.db,
+ owner: ctx.owner,
+ exprs: &body.exprs,
+ pats: &body.pats,
+ local_data: HashMap::default(),
+ infer: &ctx.result,
+ };
+ let computed_expr = eval_const(expr, &mut ctx);
+ let const_scalar = match computed_expr {
+ Ok(ComputedExpr::Literal(literal)) => literal.into(),
+ _ => ConstScalar::Unknown,
+ };
+ intern_const_scalar(const_scalar, TyBuilder::usize())
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
new file mode 100644
index 000000000..4a052851a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -0,0 +1,148 @@
+use base_db::fixture::WithFixture;
+use hir_def::{db::DefDatabase, expr::Literal};
+
+use crate::{consteval::ComputedExpr, db::HirDatabase, test_db::TestDB};
+
+use super::ConstEvalError;
+
+fn check_fail(ra_fixture: &str, error: ConstEvalError) {
+ assert_eq!(eval_goal(ra_fixture), Err(error));
+}
+
+fn check_number(ra_fixture: &str, answer: i128) {
+ let r = eval_goal(ra_fixture).unwrap();
+ match r {
+ ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer),
+ ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128),
+ x => panic!("Expected number but found {:?}", x),
+ }
+}
+
+fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
+ let (db, file_id) = TestDB::with_single_file(ra_fixture);
+ let module_id = db.module_for_file(file_id);
+ let def_map = module_id.def_map(&db);
+ let scope = &def_map[module_id.local_id].scope;
+ let const_id = scope
+ .declarations()
+ .into_iter()
+ .find_map(|x| match x {
+ hir_def::ModuleDefId::ConstId(x) => {
+ if db.const_data(x).name.as_ref()?.to_string() == "GOAL" {
+ Some(x)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ })
+ .unwrap();
+ db.const_eval(const_id)
+}
+
+#[test]
+fn add() {
+ check_number(r#"const GOAL: usize = 2 + 2;"#, 4);
+}
+
+#[test]
+fn bit_op() {
+ check_number(r#"const GOAL: u8 = !0 & !(!0 >> 1)"#, 128);
+ check_number(r#"const GOAL: i8 = !0 & !(!0 >> 1)"#, 0);
+    // FIXME: rustc evaluates this to -128
+ check_fail(
+ r#"const GOAL: i8 = 1 << 7"#,
+ ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
+ );
+ check_fail(
+ r#"const GOAL: i8 = 1 << 8"#,
+ ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
+ );
+}
+
+#[test]
+fn locals() {
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let a = 3 + 2;
+ let b = a * a;
+ b
+ };
+ "#,
+ 25,
+ );
+}
+
+#[test]
+fn consts() {
+ check_number(
+ r#"
+ const F1: i32 = 1;
+ const F3: i32 = 3 * F2;
+ const F2: i32 = 2 * F1;
+ const GOAL: i32 = F3;
+ "#,
+ 6,
+ );
+}
+
+#[test]
+fn const_loop() {
+ check_fail(
+ r#"
+ const F1: i32 = 1 * F3;
+ const F3: i32 = 3 * F2;
+ const F2: i32 = 2 * F1;
+ const GOAL: i32 = F3;
+ "#,
+ ConstEvalError::Loop,
+ );
+}
+
+#[test]
+fn const_impl_assoc() {
+ check_number(
+ r#"
+ struct U5;
+ impl U5 {
+ const VAL: usize = 5;
+ }
+ const GOAL: usize = U5::VAL;
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn const_generic_subst() {
+ // FIXME: this should evaluate to 5
+ check_fail(
+ r#"
+ struct Adder<const N: usize, const M: usize>;
+ impl<const N: usize, const M: usize> Adder<N, M> {
+ const VAL: usize = N + M;
+ }
+ const GOAL: usize = Adder::<2, 3>::VAL;
+ "#,
+ ConstEvalError::NotSupported("const generic without substitution"),
+ );
+}
+
+#[test]
+fn const_trait_assoc() {
+ // FIXME: this should evaluate to 0
+ check_fail(
+ r#"
+ struct U0;
+ trait ToConst {
+ const VAL: usize;
+ }
+ impl ToConst for U0 {
+ const VAL: usize = 0;
+ }
+ const GOAL: usize = U0::VAL;
+ "#,
+ ConstEvalError::IncompleteExpr,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
new file mode 100644
index 000000000..b385b1caf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -0,0 +1,225 @@
+//! The home of `HirDatabase`, which is the Salsa database containing all the
+//! type inference-related queries.
+
+use std::sync::Arc;
+
+use arrayvec::ArrayVec;
+use base_db::{impl_intern_key, salsa, CrateId, Upcast};
+use hir_def::{
+ db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, FunctionId,
+ GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId,
+};
+use la_arena::ArenaMap;
+
+use crate::{
+ chalk_db,
+ consteval::{ComputedExpr, ConstEvalError},
+ method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
+ Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig,
+ QuantifiedWhereClause, ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
+};
+use hir_expand::name::Name;
+
+#[salsa::query_group(HirDatabaseStorage)]
+pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
+ #[salsa::invoke(infer_wait)]
+ #[salsa::transparent]
+ fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+ #[salsa::invoke(crate::infer::infer_query)]
+ fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+ #[salsa::invoke(crate::lower::ty_query)]
+ #[salsa::cycle(crate::lower::ty_recover)]
+ fn ty(&self, def: TyDefId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::value_ty_query)]
+ fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::impl_self_ty_query)]
+ #[salsa::cycle(crate::lower::impl_self_ty_recover)]
+ fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::const_param_ty_query)]
+ fn const_param_ty(&self, def: ConstParamId) -> Ty;
+
+ #[salsa::invoke(crate::consteval::const_eval_query)]
+ #[salsa::cycle(crate::consteval::const_eval_recover)]
+ fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
+
+ #[salsa::invoke(crate::lower::impl_trait_query)]
+ fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
+
+ #[salsa::invoke(crate::lower::field_types_query)]
+ fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
+
+ #[salsa::invoke(crate::lower::callable_item_sig)]
+ fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
+
+ #[salsa::invoke(crate::lower::return_type_impl_traits)]
+ fn return_type_impl_traits(
+ &self,
+ def: FunctionId,
+ ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
+
+ #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
+ #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
+ fn generic_predicates_for_param(
+ &self,
+ def: GenericDefId,
+ param_id: TypeOrConstParamId,
+ assoc_name: Option<Name>,
+ ) -> Arc<[Binders<QuantifiedWhereClause>]>;
+
+ #[salsa::invoke(crate::lower::generic_predicates_query)]
+ fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>;
+
+ #[salsa::invoke(crate::lower::trait_environment_query)]
+ fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
+
+ #[salsa::invoke(crate::lower::generic_defaults_query)]
+ #[salsa::cycle(crate::lower::generic_defaults_recover)]
+ fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<GenericArg>]>;
+
+ #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
+ fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
+
+ #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
+ fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
+
+ /// Collects all crates in the dependency graph that have impls for the
+ /// given fingerprint. This is only used for primitive types; for
+ /// user-defined types we just look at the crate where the type is defined.
+ #[salsa::invoke(crate::method_resolution::inherent_impl_crates_query)]
+ fn inherent_impl_crates(&self, krate: CrateId, fp: TyFingerprint) -> ArrayVec<CrateId, 2>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
+ fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
+ fn trait_impls_in_block(&self, krate: BlockId) -> Option<Arc<TraitImpls>>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
+ fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+ // Interned IDs for Chalk integration
+ #[salsa::interned]
+ fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
+ #[salsa::interned]
+ fn intern_type_or_const_param_id(
+ &self,
+ param_id: TypeOrConstParamId,
+ ) -> InternedTypeOrConstParamId;
+ #[salsa::interned]
+ fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
+ #[salsa::interned]
+ fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
+ #[salsa::interned]
+ fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> InternedClosureId;
+
+ #[salsa::invoke(chalk_db::associated_ty_data_query)]
+ fn associated_ty_data(&self, id: chalk_db::AssocTypeId) -> Arc<chalk_db::AssociatedTyDatum>;
+
+ #[salsa::invoke(chalk_db::trait_datum_query)]
+ fn trait_datum(&self, krate: CrateId, trait_id: chalk_db::TraitId)
+ -> Arc<chalk_db::TraitDatum>;
+
+ #[salsa::invoke(chalk_db::struct_datum_query)]
+ fn struct_datum(
+ &self,
+ krate: CrateId,
+ struct_id: chalk_db::AdtId,
+ ) -> Arc<chalk_db::StructDatum>;
+
+ #[salsa::invoke(chalk_db::impl_datum_query)]
+ fn impl_datum(&self, krate: CrateId, impl_id: chalk_db::ImplId) -> Arc<chalk_db::ImplDatum>;
+
+ #[salsa::invoke(chalk_db::fn_def_datum_query)]
+ fn fn_def_datum(&self, krate: CrateId, fn_def_id: FnDefId) -> Arc<chalk_db::FnDefDatum>;
+
+ #[salsa::invoke(chalk_db::fn_def_variance_query)]
+ fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
+
+ #[salsa::invoke(chalk_db::adt_variance_query)]
+ fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
+
+ #[salsa::invoke(chalk_db::associated_ty_value_query)]
+ fn associated_ty_value(
+ &self,
+ krate: CrateId,
+ id: chalk_db::AssociatedTyValueId,
+ ) -> Arc<chalk_db::AssociatedTyValue>;
+
+ #[salsa::invoke(trait_solve_wait)]
+ #[salsa::transparent]
+ fn trait_solve(
+ &self,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+ ) -> Option<crate::Solution>;
+
+ #[salsa::invoke(crate::traits::trait_solve_query)]
+ fn trait_solve_query(
+ &self,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+ ) -> Option<crate::Solution>;
+
+ #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
+ fn program_clauses_for_chalk_env(
+ &self,
+ krate: CrateId,
+ env: chalk_ir::Environment<Interner>,
+ ) -> chalk_ir::ProgramClauses<Interner>;
+}
+
+fn infer_wait(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+ let _p = profile::span("infer:wait").detail(|| match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
+ DefWithBodyId::StaticId(it) => db.static_data(it).name.clone().to_string(),
+ DefWithBodyId::ConstId(it) => {
+ db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+ }
+ });
+ db.infer_query(def)
+}
+
+fn trait_solve_wait(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
+) -> Option<crate::Solution> {
+ let _p = profile::span("trait_solve::wait");
+ db.trait_solve_query(krate, goal)
+}
+
+#[test]
+fn hir_database_is_object_safe() {
+ fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedTypeOrConstParamId(salsa::InternId);
+impl_intern_key!(InternedTypeOrConstParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedLifetimeParamId(salsa::InternId);
+impl_intern_key!(InternedLifetimeParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedConstParamId(salsa::InternId);
+impl_intern_key!(InternedConstParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedOpaqueTyId(salsa::InternId);
+impl_intern_key!(InternedOpaqueTyId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedClosureId(salsa::InternId);
+impl_intern_key!(InternedClosureId);
+
+/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
+/// we have different IDs for struct and enum variant constructors.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct InternedCallableDefId(salsa::InternId);
+impl_intern_key!(InternedCallableDefId);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
new file mode 100644
index 000000000..37eb06be1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -0,0 +1,13 @@
+//! Type inference-based diagnostics.
+mod expr;
+mod match_check;
+mod unsafe_check;
+mod decl_check;
+
+pub use crate::diagnostics::{
+ decl_check::{incorrect_case, IncorrectCase},
+ expr::{
+ record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
+ },
+ unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
+};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
new file mode 100644
index 000000000..f7031a854
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -0,0 +1,701 @@
+//! Provides validators for names of declarations.
+//!
+//! This includes the following items:
+//!
+//! - variable bindings (e.g. `let x = foo();`)
+//! - struct fields (e.g. `struct Foo { field: u8 }`)
+//! - enum variants (e.g. `enum Foo { Variant { field: u8 } }`)
+//! - function/method arguments (e.g. `fn foo(arg: u8)`)
+//! - constants (e.g. `const FOO: u8 = 10;`)
+//! - static items (e.g. `static FOO: u8 = 10;`)
+//! - match arm bindings (e.g. `foo @ Some(_)`)
+
+mod case_conv;
+
+use std::fmt;
+
+use base_db::CrateId;
+use hir_def::{
+ adt::VariantData,
+ expr::{Pat, PatId},
+ src::HasSource,
+ AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, StaticId,
+ StructId,
+};
+use hir_expand::{
+ name::{AsName, Name},
+ HirFileId,
+};
+use stdx::{always, never};
+use syntax::{
+ ast::{self, HasName},
+ AstNode, AstPtr,
+};
+
+use crate::db::HirDatabase;
+
+use self::case_conv::{to_camel_case, to_lower_snake_case, to_upper_snake_case};
+
/// Lint names recognized inside `#[allow(..)]` attributes by this validator.
mod allow {
    pub(super) const BAD_STYLE: &str = "bad_style";
    pub(super) const NONSTANDARD_STYLE: &str = "nonstandard_style";
    pub(super) const NON_SNAKE_CASE: &str = "non_snake_case";
    pub(super) const NON_UPPER_CASE_GLOBAL: &str = "non_upper_case_globals";
    pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
}
+
/// Validates the names declared by `owner` (and, for functions, the bindings
/// in its body) against Rust's case conventions, returning every violation
/// found together with a suggested rename.
pub fn incorrect_case(
    db: &dyn HirDatabase,
    krate: CrateId,
    owner: ModuleDefId,
) -> Vec<IncorrectCase> {
    let _p = profile::span("validate_module_item");
    let mut validator = DeclValidator::new(db, krate);
    validator.validate_item(owner);
    validator.sink
}
+
/// The case convention an identifier is expected to follow.
#[derive(Debug)]
pub enum CaseType {
    /// `some_var`
    LowerSnakeCase,
    /// `SOME_CONST`
    UpperSnakeCase,
    /// `SomeStruct`
    UpperCamelCase,
}
+
+impl fmt::Display for CaseType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let repr = match self {
+ CaseType::LowerSnakeCase => "snake_case",
+ CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE",
+ CaseType::UpperCamelCase => "CamelCase",
+ };
+
+ repr.fmt(f)
+ }
+}
+
/// The kind of declaration an ill-cased identifier belongs to; used in the
/// user-facing diagnostic message.
#[derive(Debug)]
pub enum IdentType {
    Constant,
    Enum,
    Field,
    Function,
    Parameter,
    StaticVariable,
    Structure,
    Variable,
    Variant,
}
+
+impl fmt::Display for IdentType {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let repr = match self {
+ IdentType::Constant => "Constant",
+ IdentType::Enum => "Enum",
+ IdentType::Field => "Field",
+ IdentType::Function => "Function",
+ IdentType::Parameter => "Parameter",
+ IdentType::StaticVariable => "Static variable",
+ IdentType::Structure => "Structure",
+ IdentType::Variable => "Variable",
+ IdentType::Variant => "Variant",
+ };
+
+ repr.fmt(f)
+ }
+}
+
/// A single case-convention diagnostic: where the ill-cased identifier is,
/// what kind of declaration it names, and what it should be renamed to.
#[derive(Debug)]
pub struct IncorrectCase {
    /// File the identifier lives in.
    pub file: HirFileId,
    /// Pointer to the offending `ast::Name` node.
    pub ident: AstPtr<ast::Name>,
    /// Convention the identifier should follow.
    pub expected_case: CaseType,
    /// Kind of declaration being named.
    pub ident_type: IdentType,
    /// Current identifier text.
    pub ident_text: String,
    /// Suggested replacement text.
    pub suggested_text: String,
}
+
/// Walks declarations and accumulates case diagnostics into `sink`.
pub(super) struct DeclValidator<'a> {
    db: &'a dyn HirDatabase,
    krate: CrateId,
    // Collected diagnostics; read back by `incorrect_case`.
    pub(super) sink: Vec<IncorrectCase>,
}
+
/// Internal record of a name that should be changed, before it has been
/// resolved to a concrete source location.
#[derive(Debug)]
struct Replacement {
    current_name: Name,
    suggested_text: String,
    expected_case: CaseType,
}
+
impl<'a> DeclValidator<'a> {
    /// Creates a validator with an empty diagnostics sink.
    pub(super) fn new(db: &'a dyn HirDatabase, krate: CrateId) -> DeclValidator<'a> {
        DeclValidator { db, krate, sink: Vec::new() }
    }

    /// Entry point: dispatches to the item-kind-specific validator.
    /// Items other than functions, ADTs, consts and statics are not checked.
    pub(super) fn validate_item(&mut self, item: ModuleDefId) {
        match item {
            ModuleDefId::FunctionId(func) => self.validate_func(func),
            ModuleDefId::AdtId(adt) => self.validate_adt(adt),
            ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
            ModuleDefId::StaticId(static_id) => self.validate_static(static_id),
            _ => (),
        }
    }

    /// Dispatches an ADT to the struct or enum validator.
    fn validate_adt(&mut self, adt: AdtId) {
        match adt {
            AdtId::StructId(struct_id) => self.validate_struct(struct_id),
            AdtId::EnumId(enum_id) => self.validate_enum(enum_id),
            AdtId::UnionId(_) => {
                // FIXME: Unions aren't yet supported by this validator.
            }
        }
    }

    /// Checks whether not following the convention is allowed for this item.
    ///
    /// An item is exempt if it (or any of its lexical ancestors) carries an
    /// `#[allow(..)]` for `allow_name`, `bad_style` or `nonstandard_style`,
    /// or if the item itself is `#[no_mangle]`. `recursing` is `true` once we
    /// have walked up to an ancestor (so `no_mangle` only exempts the item it
    /// is directly attached to).
    fn allowed(&self, id: AttrDefId, allow_name: &str, recursing: bool) -> bool {
        let is_allowed = |def_id| {
            let attrs = self.db.attrs(def_id);
            // don't bug the user about directly no_mangle annotated stuff, they can't do anything about it
            (!recursing && attrs.by_key("no_mangle").exists())
                || attrs.by_key("allow").tt_values().any(|tt| {
                    // NOTE(review): substring match on the attribute token tree,
                    // so any `allow` containing the lint name textually counts.
                    let allows = tt.to_string();
                    allows.contains(allow_name)
                        || allows.contains(allow::BAD_STYLE)
                        || allows.contains(allow::NONSTANDARD_STYLE)
                })
        };

        is_allowed(id)
            // go upwards one step or give up
            || match id {
                AttrDefId::ModuleId(m) => m.containing_module(self.db.upcast()).map(|v| v.into()),
                AttrDefId::FunctionId(f) => Some(f.lookup(self.db.upcast()).container.into()),
                AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
                AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()),
                AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()),
                AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
                AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
                // These warnings should not explore macro definitions at all
                AttrDefId::MacroId(_) => None,
                AttrDefId::AdtId(aid) => match aid {
                    AdtId::StructId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
                    AdtId::EnumId(eid) => Some(eid.lookup(self.db.upcast()).container.into()),
                    // Unions aren't yet supported
                    AdtId::UnionId(_) => None,
                },
                AttrDefId::FieldId(_) => None,
                AttrDefId::EnumVariantId(_) => None,
                AttrDefId::TypeAliasId(_) => None,
                AttrDefId::GenericParamId(_) => None,
            }
            .map(|mid| self.allowed(mid, allow_name, true))
            .unwrap_or(false)
    }

    /// Validates a function: its own name and the bindings in its body
    /// (parameters, `let` bindings and `@` bindings) must be lower_snake_case.
    /// `extern` block functions are skipped entirely.
    fn validate_func(&mut self, func: FunctionId) {
        let data = self.db.function_data(func);
        if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
            cov_mark::hit!(extern_func_incorrect_case_ignored);
            return;
        }

        let body = self.db.body(func.into());

        // Recursively validate inner scope items, such as static variables and constants.
        for (_, block_def_map) in body.blocks(self.db.upcast()) {
            for (_, module) in block_def_map.modules() {
                for def_id in module.scope.declarations() {
                    // NOTE(review): the inner validator's `sink` is dropped here,
                    // so diagnostics for these nested items appear to be
                    // discarded rather than merged into `self.sink` — confirm
                    // whether they are reported through another path.
                    let mut validator = DeclValidator::new(self.db, self.krate);
                    validator.validate_item(def_id);
                }
            }
        }

        // Check whether non-snake case identifiers are allowed for this function.
        if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
            return;
        }

        // Check the function name.
        let function_name = data.name.to_string();
        let fn_name_replacement = to_lower_snake_case(&function_name).map(|new_name| Replacement {
            current_name: data.name.clone(),
            suggested_text: new_name,
            expected_case: CaseType::LowerSnakeCase,
        });

        // Check the patterns inside the function body.
        // This includes function parameters.
        let pats_replacements = body
            .pats
            .iter()
            .filter_map(|(id, pat)| match pat {
                Pat::Bind { name, .. } => Some((id, name)),
                _ => None,
            })
            .filter_map(|(id, bind_name)| {
                Some((
                    id,
                    Replacement {
                        current_name: bind_name.clone(),
                        suggested_text: to_lower_snake_case(&bind_name.to_string())?,
                        expected_case: CaseType::LowerSnakeCase,
                    },
                ))
            })
            .collect();

        // If there is at least one element to spawn a warning on, go to the source map and generate a warning.
        if let Some(fn_name_replacement) = fn_name_replacement {
            self.create_incorrect_case_diagnostic_for_func(func, fn_name_replacement);
        }

        self.create_incorrect_case_diagnostic_for_variables(func, pats_replacements);
    }

    /// Given the information about incorrect names in the function declaration, looks up into the source code
    /// for exact locations and adds diagnostics into the sink.
    fn create_incorrect_case_diagnostic_for_func(
        &mut self,
        func: FunctionId,
        fn_name_replacement: Replacement,
    ) {
        let fn_loc = func.lookup(self.db.upcast());
        let fn_src = fn_loc.source(self.db.upcast());

        // Diagnostic for function name.
        let ast_ptr = match fn_src.value.name() {
            Some(name) => name,
            None => {
                never!(
                    "Replacement ({:?}) was generated for a function without a name: {:?}",
                    fn_name_replacement,
                    fn_src
                );
                return;
            }
        };

        let diagnostic = IncorrectCase {
            file: fn_src.file_id,
            ident_type: IdentType::Function,
            ident: AstPtr::new(&ast_ptr),
            expected_case: fn_name_replacement.expected_case,
            ident_text: fn_name_replacement.current_name.to_string(),
            suggested_text: fn_name_replacement.suggested_text,
        };

        self.sink.push(diagnostic);
    }

    /// Given the information about incorrect variable names, looks up into the source code
    /// for exact locations and adds diagnostics into the sink.
    fn create_incorrect_case_diagnostic_for_variables(
        &mut self,
        func: FunctionId,
        pats_replacements: Vec<(PatId, Replacement)>,
    ) {
        // XXX: only look at source_map if we do have missing fields
        if pats_replacements.is_empty() {
            return;
        }

        let (_, source_map) = self.db.body_with_source_map(func.into());

        for (id, replacement) in pats_replacements {
            if let Ok(source_ptr) = source_map.pat_syntax(id) {
                if let Some(expr) = source_ptr.value.as_ref().left() {
                    let root = source_ptr.file_syntax(self.db.upcast());
                    if let ast::Pat::IdentPat(ident_pat) = expr.to_node(&root) {
                        let parent = match ident_pat.syntax().parent() {
                            Some(parent) => parent,
                            None => continue,
                        };
                        let name_ast = match ident_pat.name() {
                            Some(name_ast) => name_ast,
                            None => continue,
                        };

                        let is_param = ast::Param::can_cast(parent.kind());

                        // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
                        // because e.g. match arms are patterns as well.
                        // In other words, we check that it's a named variable binding.
                        let is_binding = ast::LetStmt::can_cast(parent.kind())
                            || (ast::MatchArm::can_cast(parent.kind())
                                && ident_pat.at_token().is_some());
                        if !(is_param || is_binding) {
                            // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
                            continue;
                        }

                        let ident_type =
                            if is_param { IdentType::Parameter } else { IdentType::Variable };

                        let diagnostic = IncorrectCase {
                            file: source_ptr.file_id,
                            ident_type,
                            ident: AstPtr::new(&name_ast),
                            expected_case: replacement.expected_case,
                            ident_text: replacement.current_name.to_string(),
                            suggested_text: replacement.suggested_text,
                        };

                        self.sink.push(diagnostic);
                    }
                }
            }
        }
    }

    /// Validates a struct: its name must be UpperCamelCase and its record
    /// fields lower_snake_case (each subject to its own `allow` exemption).
    fn validate_struct(&mut self, struct_id: StructId) {
        let data = self.db.struct_data(struct_id);

        let non_camel_case_allowed =
            self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false);
        let non_snake_case_allowed = self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false);

        // Check the structure name.
        let struct_name = data.name.to_string();
        let struct_name_replacement = if !non_camel_case_allowed {
            to_camel_case(&struct_name).map(|new_name| Replacement {
                current_name: data.name.clone(),
                suggested_text: new_name,
                expected_case: CaseType::UpperCamelCase,
            })
        } else {
            None
        };

        // Check the field names.
        let mut struct_fields_replacements = Vec::new();

        if !non_snake_case_allowed {
            if let VariantData::Record(fields) = data.variant_data.as_ref() {
                for (_, field) in fields.iter() {
                    let field_name = field.name.to_string();
                    if let Some(new_name) = to_lower_snake_case(&field_name) {
                        let replacement = Replacement {
                            current_name: field.name.clone(),
                            suggested_text: new_name,
                            expected_case: CaseType::LowerSnakeCase,
                        };
                        struct_fields_replacements.push(replacement);
                    }
                }
            }
        }

        // If there is at least one element to spawn a warning on, go to the source map and generate a warning.
        self.create_incorrect_case_diagnostic_for_struct(
            struct_id,
            struct_name_replacement,
            struct_fields_replacements,
        );
    }

    /// Given the information about incorrect names in the struct declaration, looks up into the source code
    /// for exact locations and adds diagnostics into the sink.
    fn create_incorrect_case_diagnostic_for_struct(
        &mut self,
        struct_id: StructId,
        struct_name_replacement: Option<Replacement>,
        struct_fields_replacements: Vec<Replacement>,
    ) {
        // XXX: Only look at sources if we do have incorrect names.
        if struct_name_replacement.is_none() && struct_fields_replacements.is_empty() {
            return;
        }

        let struct_loc = struct_id.lookup(self.db.upcast());
        let struct_src = struct_loc.source(self.db.upcast());

        if let Some(replacement) = struct_name_replacement {
            let ast_ptr = match struct_src.value.name() {
                Some(name) => name,
                None => {
                    never!(
                        "Replacement ({:?}) was generated for a structure without a name: {:?}",
                        replacement,
                        struct_src
                    );
                    return;
                }
            };

            let diagnostic = IncorrectCase {
                file: struct_src.file_id,
                ident_type: IdentType::Structure,
                ident: AstPtr::new(&ast_ptr),
                expected_case: replacement.expected_case,
                ident_text: replacement.current_name.to_string(),
                suggested_text: replacement.suggested_text,
            };

            self.sink.push(diagnostic);
        }

        let struct_fields_list = match struct_src.value.field_list() {
            Some(ast::FieldList::RecordFieldList(fields)) => fields,
            _ => {
                always!(
                    struct_fields_replacements.is_empty(),
                    "Replacements ({:?}) were generated for a structure fields which had no fields list: {:?}",
                    struct_fields_replacements,
                    struct_src
                );
                return;
            }
        };
        let mut struct_fields_iter = struct_fields_list.fields();
        for field_to_rename in struct_fields_replacements {
            // We assume that parameters in replacement are in the same order as in the
            // actual params list, but just some of them (ones that named correctly) are skipped.
            let ast_ptr = loop {
                match struct_fields_iter.next().and_then(|field| field.name()) {
                    Some(field_name) => {
                        if field_name.as_name() == field_to_rename.current_name {
                            break field_name;
                        }
                    }
                    None => {
                        never!(
                            "Replacement ({:?}) was generated for a structure field which was not found: {:?}",
                            field_to_rename, struct_src
                        );
                        return;
                    }
                }
            };

            let diagnostic = IncorrectCase {
                file: struct_src.file_id,
                ident_type: IdentType::Field,
                ident: AstPtr::new(&ast_ptr),
                expected_case: field_to_rename.expected_case,
                ident_text: field_to_rename.current_name.to_string(),
                suggested_text: field_to_rename.suggested_text,
            };

            self.sink.push(diagnostic);
        }
    }

    /// Validates an enum: both its name and its variant names must be
    /// UpperCamelCase (one `allow` exemption covers all of them).
    fn validate_enum(&mut self, enum_id: EnumId) {
        let data = self.db.enum_data(enum_id);

        // Check whether non-camel case names are allowed for this enum.
        if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
            return;
        }

        // Check the enum name.
        let enum_name = data.name.to_string();
        let enum_name_replacement = to_camel_case(&enum_name).map(|new_name| Replacement {
            current_name: data.name.clone(),
            suggested_text: new_name,
            expected_case: CaseType::UpperCamelCase,
        });

        // Check the field names.
        let enum_fields_replacements = data
            .variants
            .iter()
            .filter_map(|(_, variant)| {
                Some(Replacement {
                    current_name: variant.name.clone(),
                    suggested_text: to_camel_case(&variant.name.to_string())?,
                    expected_case: CaseType::UpperCamelCase,
                })
            })
            .collect();

        // If there is at least one element to spawn a warning on, go to the source map and generate a warning.
        self.create_incorrect_case_diagnostic_for_enum(
            enum_id,
            enum_name_replacement,
            enum_fields_replacements,
        )
    }

    /// Given the information about incorrect names in the enum declaration, looks up into the source code
    /// for exact locations and adds diagnostics into the sink.
    fn create_incorrect_case_diagnostic_for_enum(
        &mut self,
        enum_id: EnumId,
        enum_name_replacement: Option<Replacement>,
        enum_variants_replacements: Vec<Replacement>,
    ) {
        // XXX: only look at sources if we do have incorrect names
        if enum_name_replacement.is_none() && enum_variants_replacements.is_empty() {
            return;
        }

        let enum_loc = enum_id.lookup(self.db.upcast());
        let enum_src = enum_loc.source(self.db.upcast());

        if let Some(replacement) = enum_name_replacement {
            let ast_ptr = match enum_src.value.name() {
                Some(name) => name,
                None => {
                    never!(
                        "Replacement ({:?}) was generated for a enum without a name: {:?}",
                        replacement,
                        enum_src
                    );
                    return;
                }
            };

            let diagnostic = IncorrectCase {
                file: enum_src.file_id,
                ident_type: IdentType::Enum,
                ident: AstPtr::new(&ast_ptr),
                expected_case: replacement.expected_case,
                ident_text: replacement.current_name.to_string(),
                suggested_text: replacement.suggested_text,
            };

            self.sink.push(diagnostic);
        }

        let enum_variants_list = match enum_src.value.variant_list() {
            Some(variants) => variants,
            _ => {
                always!(
                    enum_variants_replacements.is_empty(),
                    "Replacements ({:?}) were generated for a enum variants which had no fields list: {:?}",
                    enum_variants_replacements,
                    enum_src
                );
                return;
            }
        };
        let mut enum_variants_iter = enum_variants_list.variants();
        for variant_to_rename in enum_variants_replacements {
            // We assume that parameters in replacement are in the same order as in the
            // actual params list, but just some of them (ones that named correctly) are skipped.
            let ast_ptr = loop {
                match enum_variants_iter.next().and_then(|v| v.name()) {
                    Some(variant_name) => {
                        if variant_name.as_name() == variant_to_rename.current_name {
                            break variant_name;
                        }
                    }
                    None => {
                        never!(
                            "Replacement ({:?}) was generated for a enum variant which was not found: {:?}",
                            variant_to_rename, enum_src
                        );
                        return;
                    }
                }
            };

            let diagnostic = IncorrectCase {
                file: enum_src.file_id,
                ident_type: IdentType::Variant,
                ident: AstPtr::new(&ast_ptr),
                expected_case: variant_to_rename.expected_case,
                ident_text: variant_to_rename.current_name.to_string(),
                suggested_text: variant_to_rename.suggested_text,
            };

            self.sink.push(diagnostic);
        }
    }

    /// Validates a const: its name (if any) must be UPPER_SNAKE_CASE.
    /// Unnamed consts (`const _: () = ...`) are skipped.
    fn validate_const(&mut self, const_id: ConstId) {
        let data = self.db.const_data(const_id);

        if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
            return;
        }

        let name = match &data.name {
            Some(name) => name,
            None => return,
        };

        let const_name = name.to_string();
        let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
            Replacement {
                current_name: name.clone(),
                suggested_text: new_name,
                expected_case: CaseType::UpperSnakeCase,
            }
        } else {
            // Nothing to do here.
            return;
        };

        let const_loc = const_id.lookup(self.db.upcast());
        let const_src = const_loc.source(self.db.upcast());

        let ast_ptr = match const_src.value.name() {
            Some(name) => name,
            None => return,
        };

        let diagnostic = IncorrectCase {
            file: const_src.file_id,
            ident_type: IdentType::Constant,
            ident: AstPtr::new(&ast_ptr),
            expected_case: replacement.expected_case,
            ident_text: replacement.current_name.to_string(),
            suggested_text: replacement.suggested_text,
        };

        self.sink.push(diagnostic);
    }

    /// Validates a static: its name must be UPPER_SNAKE_CASE.
    /// `extern` statics are skipped.
    fn validate_static(&mut self, static_id: StaticId) {
        let data = self.db.static_data(static_id);
        if data.is_extern {
            cov_mark::hit!(extern_static_incorrect_case_ignored);
            return;
        }

        if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
            return;
        }

        let name = &data.name;

        let static_name = name.to_string();
        let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
            Replacement {
                current_name: name.clone(),
                suggested_text: new_name,
                expected_case: CaseType::UpperSnakeCase,
            }
        } else {
            // Nothing to do here.
            return;
        };

        let static_loc = static_id.lookup(self.db.upcast());
        let static_src = static_loc.source(self.db.upcast());

        let ast_ptr = match static_src.value.name() {
            Some(name) => name,
            None => return,
        };

        let diagnostic = IncorrectCase {
            file: static_src.file_id,
            ident_type: IdentType::StaticVariable,
            ident: AstPtr::new(&ast_ptr),
            expected_case: replacement.expected_case,
            ident_text: replacement.current_name.to_string(),
            suggested_text: replacement.suggested_text,
        };

        self.sink.push(diagnostic);
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
new file mode 100644
index 000000000..88d607194
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -0,0 +1,199 @@
+//! Functions for string case manipulation, such as detecting the identifier case,
+//! and converting it into appropriate form.
+
+// Code that was taken from rustc was taken at commit 89fdb30,
+// from file /compiler/rustc_lint/src/nonstandard_style.rs
+
+/// Converts an identifier to an UpperCamelCase form.
+/// Returns `None` if the string is already in UpperCamelCase.
/// Converts an identifier to an UpperCamelCase form.
/// Returns `None` if the string is already in UpperCamelCase.
pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
    if is_camel_case(ident) {
        return None;
    }

    // Taken from rustc.
    let ret = ident
        .trim_matches('_')
        .split('_')
        // Consecutive underscores produce empty components; drop them.
        .filter(|component| !component.is_empty())
        .map(|component| {
            let mut camel_cased_component = String::with_capacity(component.len());

            let mut new_word = true;
            let mut prev_is_lower_case = true;

            for c in component.chars() {
                // Preserve the case if an uppercase letter follows a lowercase letter, so that
                // `camelCase` is converted to `CamelCase`.
                if prev_is_lower_case && c.is_uppercase() {
                    new_word = true;
                }

                if new_word {
                    // `extend` because upper/lowercasing one char can yield several.
                    camel_cased_component.extend(c.to_uppercase());
                } else {
                    camel_cased_component.extend(c.to_lowercase());
                }

                prev_is_lower_case = c.is_lowercase();
                new_word = false;
            }

            camel_cased_component
        })
        .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
            // separate two components with an underscore if their boundary cannot
            // be distinguished using an uppercase/lowercase case distinction
            let join = prev
                .and_then(|prev| {
                    let f = next.chars().next()?;
                    let l = prev.chars().last()?;
                    Some(!char_has_case(l) && !char_has_case(f))
                })
                .unwrap_or(false);
            (acc + if join { "_" } else { "" } + &next, Some(next))
        })
        .0;
    Some(ret)
}
+
+/// Converts an identifier to a lower_snake_case form.
+/// Returns `None` if the string is already in lower_snake_case.
+pub(crate) fn to_lower_snake_case(ident: &str) -> Option<String> {
+ if is_lower_snake_case(ident) {
+ return None;
+ } else if is_upper_snake_case(ident) {
+ return Some(ident.to_lowercase());
+ }
+
+ Some(stdx::to_lower_snake_case(ident))
+}
+
+/// Converts an identifier to an UPPER_SNAKE_CASE form.
+/// Returns `None` if the string is already is UPPER_SNAKE_CASE.
+pub(crate) fn to_upper_snake_case(ident: &str) -> Option<String> {
+ if is_upper_snake_case(ident) {
+ return None;
+ } else if is_lower_snake_case(ident) {
+ return Some(ident.to_uppercase());
+ }
+
+ Some(stdx::to_upper_snake_case(ident))
+}
+
+// Taken from rustc.
+// Modified by replacing the use of unstable feature `array_windows`.
// Taken from rustc.
// Modified by replacing the use of unstable feature `array_windows`.
//
// A name is considered camel case when (ignoring leading/trailing `_`):
// it does not start with a lowercase letter, contains no `__`, and no
// underscore is adjacent to a cased character.
fn is_camel_case(name: &str) -> bool {
    let name = name.trim_matches('_');
    if name.is_empty() {
        return true;
    }

    // `fst` holds the previous char while scanning adjacent pairs.
    let mut fst = None;
    // start with a non-lowercase letter rather than non-uppercase
    // ones (some scripts don't have a concept of upper/lowercase)
    name.chars().next().map_or(true, |c| !c.is_lowercase())
        && !name.contains("__")
        && !name.chars().any(|snd| {
            let ret = match fst {
                None => false,
                Some(fst) => char_has_case(fst) && snd == '_' || char_has_case(snd) && fst == '_',
            };
            fst = Some(snd);

            ret
        })
}
+
+fn is_lower_snake_case(ident: &str) -> bool {
+ is_snake_case(ident, char::is_uppercase)
+}
+
+fn is_upper_snake_case(ident: &str) -> bool {
+ is_snake_case(ident, char::is_lowercase)
+}
+
+// Taken from rustc.
+// Modified to allow checking for both upper and lower snake case.
// Taken from rustc.
// Modified to allow checking for both upper and lower snake case.
//
// After stripping leading/trailing underscores, the identifier must contain
// no consecutive underscores and no character for which `wrong_case` holds.
fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
    if ident.is_empty() {
        return true;
    }

    let mut prev_was_underscore = false;
    for c in ident.trim_matches('_').chars() {
        if c == '_' {
            if prev_was_underscore {
                // `__` inside the identifier is never snake case.
                return false;
            }
            prev_was_underscore = true;
        } else if wrong_case(c) {
            // It would be more obvious to check for the correct case,
            // but some characters do not have a case.
            return false;
        } else {
            prev_was_underscore = false;
        }
    }
    true
}
+
+// Taken from rustc.
// Taken from rustc.
/// `true` if `c` participates in case distinctions at all (some scripts have
/// no upper/lower distinction).
fn char_has_case(c: char) -> bool {
    c.is_lowercase() || c.is_uppercase()
}
+
#[cfg(test)]
mod tests {
    use super::*;
    use expect_test::{expect, Expect};

    /// Runs `fun` on `input` and compares against the expected suggestion.
    fn check<F: Fn(&str) -> Option<String>>(fun: F, input: &str, expect: Expect) {
        // `None` is translated to empty string, meaning that there is nothing to fix.
        let output = fun(input).unwrap_or_default();

        expect.assert_eq(&output);
    }

    #[test]
    fn test_to_lower_snake_case() {
        check(to_lower_snake_case, "lower_snake_case", expect![[""]]);
        check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]);
        check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]);
        check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]);
        check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]);
        check(to_lower_snake_case, "a", expect![[""]]);
        check(to_lower_snake_case, "abc", expect![[""]]);
        check(to_lower_snake_case, "foo__bar", expect![["foo_bar"]]);
    }

    #[test]
    fn test_to_camel_case() {
        check(to_camel_case, "CamelCase", expect![[""]]);
        check(to_camel_case, "CamelCase_", expect![[""]]);
        check(to_camel_case, "_CamelCase", expect![[""]]);
        check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]);
        check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]);
        check(to_camel_case, "UPPER_SNAKE_CASE", expect![["UpperSnakeCase"]]);
        check(to_camel_case, "Weird_Case", expect![["WeirdCase"]]);
        check(to_camel_case, "name", expect![["Name"]]);
        check(to_camel_case, "A", expect![[""]]);
        check(to_camel_case, "AABB", expect![[""]]);
        // Taken from rustc: /compiler/rustc_lint/src/nonstandard_style/tests.rs
        check(to_camel_case, "X86_64", expect![[""]]);
        check(to_camel_case, "x86__64", expect![["X86_64"]]);
        check(to_camel_case, "Abc_123", expect![["Abc123"]]);
        check(to_camel_case, "A1_b2_c3", expect![["A1B2C3"]]);
    }

    #[test]
    fn test_to_upper_snake_case() {
        check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]);
        check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]);
        check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]);
        check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]);
        check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]);
        check(to_upper_snake_case, "A", expect![[""]]);
        check(to_upper_snake_case, "ABC", expect![[""]]);
        check(to_upper_snake_case, "X86_64", expect![[""]]);
        check(to_upper_snake_case, "FOO_BAr", expect![["FOO_BAR"]]);
        check(to_upper_snake_case, "FOO__BAR", expect![["FOO_BAR"]]);
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
new file mode 100644
index 000000000..642e03edd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -0,0 +1,416 @@
+//! Various diagnostics for expressions that are collected together in one pass
+//! through the body using inference results: mismatched arg counts, missing
+//! fields, etc.
+
+use std::fmt;
+use std::sync::Arc;
+
+use hir_def::{path::path, resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
+use hir_expand::name;
+use itertools::Either;
+use itertools::Itertools;
+use rustc_hash::FxHashSet;
+use typed_arena::Arena;
+
+use crate::{
+ db::HirDatabase,
+ diagnostics::match_check::{
+ self,
+ deconstruct_pat::DeconstructedPat,
+ usefulness::{compute_match_usefulness, MatchCheckCtx},
+ },
+ display::HirDisplay,
+ InferenceResult, Ty, TyExt,
+};
+
+pub(crate) use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, MatchArm, Pat, PatId},
+ LocalFieldId, VariantId,
+};
+
+pub enum BodyValidationDiagnostic {
+ RecordMissingFields {
+ record: Either<ExprId, PatId>,
+ variant: VariantId,
+ missed_fields: Vec<LocalFieldId>,
+ },
+ ReplaceFilterMapNextWithFindMap {
+ method_call_expr: ExprId,
+ },
+ MissingMatchArms {
+ match_expr: ExprId,
+ uncovered_patterns: String,
+ },
+}
+
+impl BodyValidationDiagnostic {
+ pub fn collect(db: &dyn HirDatabase, owner: DefWithBodyId) -> Vec<BodyValidationDiagnostic> {
+ let _p = profile::span("BodyValidationDiagnostic::collect");
+ let infer = db.infer(owner);
+ let mut validator = ExprValidator::new(owner, infer);
+ validator.validate_body(db);
+ validator.diagnostics
+ }
+}
+
+struct ExprValidator {
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ pub(super) diagnostics: Vec<BodyValidationDiagnostic>,
+}
+
+impl ExprValidator {
+ fn new(owner: DefWithBodyId, infer: Arc<InferenceResult>) -> ExprValidator {
+ ExprValidator { owner, infer, diagnostics: Vec::new() }
+ }
+
+ fn validate_body(&mut self, db: &dyn HirDatabase) {
+ let body = db.body(self.owner);
+ let mut filter_map_next_checker = None;
+
+ for (id, expr) in body.exprs.iter() {
+ if let Some((variant, missed_fields, true)) =
+ record_literal_missing_fields(db, &self.infer, id, expr)
+ {
+ self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
+ record: Either::Left(id),
+ variant,
+ missed_fields,
+ });
+ }
+
+ match expr {
+ Expr::Match { expr, arms } => {
+ self.validate_match(id, *expr, arms, db, self.infer.clone());
+ }
+ Expr::Call { .. } | Expr::MethodCall { .. } => {
+ self.validate_call(db, id, expr, &mut filter_map_next_checker);
+ }
+ _ => {}
+ }
+ }
+ for (id, pat) in body.pats.iter() {
+ if let Some((variant, missed_fields, true)) =
+ record_pattern_missing_fields(db, &self.infer, id, pat)
+ {
+ self.diagnostics.push(BodyValidationDiagnostic::RecordMissingFields {
+ record: Either::Right(id),
+ variant,
+ missed_fields,
+ });
+ }
+ }
+ }
+
+ fn validate_call(
+ &mut self,
+ db: &dyn HirDatabase,
+ call_id: ExprId,
+ expr: &Expr,
+ filter_map_next_checker: &mut Option<FilterMapNextChecker>,
+ ) {
+ // Check that the number of arguments matches the number of parameters.
+
+ // FIXME: Due to shortcomings in the current type system implementation, only emit this
+ // diagnostic if there are no type mismatches in the containing function.
+ if self.infer.expr_type_mismatches().next().is_some() {
+ return;
+ }
+
+ match expr {
+ Expr::MethodCall { receiver, .. } => {
+ let (callee, _) = match self.infer.method_resolution(call_id) {
+ Some(it) => it,
+ None => return,
+ };
+
+ if filter_map_next_checker
+ .get_or_insert_with(|| {
+ FilterMapNextChecker::new(&self.owner.resolver(db.upcast()), db)
+ })
+ .check(call_id, receiver, &callee)
+ .is_some()
+ {
+ self.diagnostics.push(
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
+ method_call_expr: call_id,
+ },
+ );
+ }
+ }
+ _ => return,
+ };
+ }
+
+ fn validate_match(
+ &mut self,
+ id: ExprId,
+ match_expr: ExprId,
+ arms: &[MatchArm],
+ db: &dyn HirDatabase,
+ infer: Arc<InferenceResult>,
+ ) {
+ let body = db.body(self.owner);
+
+ let match_expr_ty = &infer[match_expr];
+ if match_expr_ty.is_unknown() {
+ return;
+ }
+
+ let pattern_arena = Arena::new();
+ let cx = MatchCheckCtx {
+ module: self.owner.module(db.upcast()),
+ body: self.owner,
+ db,
+ pattern_arena: &pattern_arena,
+ };
+
+ let mut m_arms = Vec::with_capacity(arms.len());
+ let mut has_lowering_errors = false;
+ for arm in arms {
+ if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
+ // We only include patterns whose type matches the type
+ // of the match expression. If we had an InvalidMatchArmPattern
+ // diagnostic or similar we could raise that in an else
+ // block here.
+ //
+ // When comparing the types, we also have to consider that rustc
+ // will automatically de-reference the match expression type if
+ // necessary.
+ //
+ // FIXME we should use the type checker for this.
+ if (pat_ty == match_expr_ty
+ || match_expr_ty
+ .as_reference()
+ .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
+ .unwrap_or(false))
+ && types_of_subpatterns_do_match(arm.pat, &body, &infer)
+ {
+ // If we had a NotUsefulMatchArm diagnostic, we could
+ // check the usefulness of each pattern as we added it
+ // to the matrix here.
+ let m_arm = match_check::MatchArm {
+ pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors),
+ has_guard: arm.guard.is_some(),
+ };
+ m_arms.push(m_arm);
+ if !has_lowering_errors {
+ continue;
+ }
+ }
+ }
+
+ // If we can't resolve the type of a pattern, or the pattern type doesn't
+ // fit the match expression, we skip this diagnostic. Skipping the entire
+ // diagnostic rather than just not including this match arm is preferred
+ // to avoid the chance of false positives.
+ cov_mark::hit!(validate_match_bailed_out);
+ return;
+ }
+
+ let report = compute_match_usefulness(&cx, &m_arms, match_expr_ty);
+
+ // FIXME Report unreachable arms
+ // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
+
+ let witnesses = report.non_exhaustiveness_witnesses;
+ if !witnesses.is_empty() {
+ self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
+ match_expr: id,
+ uncovered_patterns: missing_match_arms(&cx, match_expr_ty, witnesses, arms),
+ });
+ }
+ }
+
+ fn lower_pattern<'p>(
+ &self,
+ cx: &MatchCheckCtx<'_, 'p>,
+ pat: PatId,
+ db: &dyn HirDatabase,
+ body: &Body,
+ have_errors: &mut bool,
+ ) -> &'p DeconstructedPat<'p> {
+ let mut patcx = match_check::PatCtxt::new(db, &self.infer, body);
+ let pattern = patcx.lower_pattern(pat);
+ let pattern = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
+ if !patcx.errors.is_empty() {
+ *have_errors = true;
+ }
+ pattern
+ }
+}
+
+struct FilterMapNextChecker {
+ filter_map_function_id: Option<hir_def::FunctionId>,
+ next_function_id: Option<hir_def::FunctionId>,
+ prev_filter_map_expr_id: Option<ExprId>,
+}
+
+impl FilterMapNextChecker {
+ fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self {
+ // Find and store the FunctionIds for Iterator::filter_map and Iterator::next
+ let iterator_path = path![core::iter::Iterator];
+ let mut filter_map_function_id = None;
+ let mut next_function_id = None;
+
+ if let Some(iterator_trait_id) = resolver.resolve_known_trait(db.upcast(), &iterator_path) {
+ let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
+ for item in iterator_trait_items.iter() {
+ if let (name, AssocItemId::FunctionId(id)) = item {
+ if *name == name![filter_map] {
+ filter_map_function_id = Some(*id);
+ }
+ if *name == name![next] {
+ next_function_id = Some(*id);
+ }
+ }
+ if filter_map_function_id.is_some() && next_function_id.is_some() {
+ break;
+ }
+ }
+ }
+ Self { filter_map_function_id, next_function_id, prev_filter_map_expr_id: None }
+ }
+
+ // check for instances of .filter_map(..).next()
+ fn check(
+ &mut self,
+ current_expr_id: ExprId,
+ receiver_expr_id: &ExprId,
+ function_id: &hir_def::FunctionId,
+ ) -> Option<()> {
+ if *function_id == self.filter_map_function_id? {
+ self.prev_filter_map_expr_id = Some(current_expr_id);
+ return None;
+ }
+
+ if *function_id == self.next_function_id? {
+ if let Some(prev_filter_map_expr_id) = self.prev_filter_map_expr_id {
+ if *receiver_expr_id == prev_filter_map_expr_id {
+ return Some(());
+ }
+ }
+ }
+
+ self.prev_filter_map_expr_id = None;
+ None
+ }
+}
+
+pub fn record_literal_missing_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: ExprId,
+ expr: &Expr,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+ let (fields, exhaustive) = match expr {
+ Expr::RecordLit { fields, spread, ellipsis, is_assignee_expr, .. } => {
+ let exhaustive = if *is_assignee_expr { !*ellipsis } else { spread.is_none() };
+ (fields, exhaustive)
+ }
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_expr(id)?;
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.variant_data(db.upcast());
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ let missed_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .collect();
+ if missed_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, missed_fields, exhaustive))
+}
+
+pub fn record_pattern_missing_fields(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ id: PatId,
+ pat: &Pat,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+ let (fields, exhaustive) = match pat {
+ Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
+ _ => return None,
+ };
+
+ let variant_def = infer.variant_resolution_for_pat(id)?;
+ if let VariantId::UnionId(_) = variant_def {
+ return None;
+ }
+
+ let variant_data = variant_def.variant_data(db.upcast());
+
+ let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+ let missed_fields: Vec<LocalFieldId> = variant_data
+ .fields()
+ .iter()
+ .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+ .collect();
+ if missed_fields.is_empty() {
+ return None;
+ }
+ Some((variant_def, missed_fields, exhaustive))
+}
+
+fn types_of_subpatterns_do_match(pat: PatId, body: &Body, infer: &InferenceResult) -> bool {
+ fn walk(pat: PatId, body: &Body, infer: &InferenceResult, has_type_mismatches: &mut bool) {
+ match infer.type_mismatch_for_pat(pat) {
+ Some(_) => *has_type_mismatches = true,
+ None => {
+ body[pat].walk_child_pats(|subpat| walk(subpat, body, infer, has_type_mismatches))
+ }
+ }
+ }
+
+ let mut has_type_mismatches = false;
+ walk(pat, body, infer, &mut has_type_mismatches);
+ !has_type_mismatches
+}
+
+fn missing_match_arms<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ scrut_ty: &Ty,
+ witnesses: Vec<DeconstructedPat<'p>>,
+ arms: &[MatchArm],
+) -> String {
+ struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
+ impl<'a, 'p> fmt::Display for DisplayWitness<'a, 'p> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let DisplayWitness(witness, cx) = *self;
+ let pat = witness.to_pat(cx);
+ write!(f, "{}", pat.display(cx.db))
+ }
+ }
+
+ let non_empty_enum = match scrut_ty.as_adt() {
+ Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
+ _ => false,
+ };
+ if arms.is_empty() && !non_empty_enum {
+ format!("type `{}` is non-empty", scrut_ty.display(cx.db))
+ } else {
+ let pat_display = |witness| DisplayWitness(witness, cx);
+ const LIMIT: usize = 3;
+ match &*witnesses {
+ [witness] => format!("`{}` not covered", pat_display(witness)),
+ [head @ .., tail] if head.len() < LIMIT => {
+ let head = head.iter().map(pat_display);
+ format!("`{}` and `{}` not covered", head.format("`, `"), pat_display(tail))
+ }
+ _ => {
+ let (head, tail) = witnesses.split_at(LIMIT);
+ let head = head.iter().map(pat_display);
+ format!("`{}` and {} more not covered", head.format("`, `"), tail.len())
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
new file mode 100644
index 000000000..d51ad72bd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -0,0 +1,508 @@
+//! Validation of matches.
+//!
+//! This module provides lowering from [hir_def::expr::Pat] to [self::Pat] and the match
+//! checking algorithm.
+//!
+//! It is modeled on the rustc module `rustc_mir_build::thir::pattern`.
+
+mod pat_util;
+
+pub(crate) mod deconstruct_pat;
+pub(crate) mod usefulness;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, HasModule, LocalFieldId,
+ VariantId,
+};
+use hir_expand::name::{name, Name};
+use stdx::{always, never};
+
+use crate::{
+ db::HirDatabase,
+ display::{HirDisplay, HirDisplayError, HirFormatter},
+ infer::BindingMode,
+ InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
+};
+
+use self::pat_util::EnumerateAndAdjustIterator;
+
+pub(crate) use self::usefulness::MatchArm;
+
+#[derive(Clone, Debug)]
+pub(crate) enum PatternError {
+ Unimplemented,
+ UnexpectedType,
+ UnresolvedVariant,
+ MissingField,
+ ExtraFields,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) struct FieldPat {
+ pub(crate) field: LocalFieldId,
+ pub(crate) pattern: Pat,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) struct Pat {
+ pub(crate) ty: Ty,
+ pub(crate) kind: Box<PatKind>,
+}
+
+/// Close relative to `rustc_mir_build::thir::pattern::PatKind`
+#[derive(Clone, Debug, PartialEq)]
+pub(crate) enum PatKind {
+ Wild,
+
+ /// `x`, `ref x`, `x @ P`, etc.
+ Binding {
+ name: Name,
+ subpattern: Option<Pat>,
+ },
+
+ /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with
+ /// multiple variants.
+ Variant {
+ substs: Substitution,
+ enum_variant: EnumVariantId,
+ subpatterns: Vec<FieldPat>,
+ },
+
+ /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with
+ /// a single variant.
+ Leaf {
+ subpatterns: Vec<FieldPat>,
+ },
+
+ /// `box P`, `&P`, `&mut P`, etc.
+ Deref {
+ subpattern: Pat,
+ },
+
+ // FIXME: for now, only bool literals are implemented
+ LiteralBool {
+ value: bool,
+ },
+
+ /// An or-pattern, e.g. `p | q`.
+ /// Invariant: `pats.len() >= 2`.
+ Or {
+ pats: Vec<Pat>,
+ },
+}
+
+pub(crate) struct PatCtxt<'a> {
+ db: &'a dyn HirDatabase,
+ infer: &'a InferenceResult,
+ body: &'a Body,
+ pub(crate) errors: Vec<PatternError>,
+}
+
+impl<'a> PatCtxt<'a> {
+ pub(crate) fn new(db: &'a dyn HirDatabase, infer: &'a InferenceResult, body: &'a Body) -> Self {
+ Self { db, infer, body, errors: Vec::new() }
+ }
+
+ pub(crate) fn lower_pattern(&mut self, pat: PatId) -> Pat {
+ // XXX(iDawer): Collecting pattern adjustments feels imprecise to me.
+ // When lowering of & and box patterns is implemented, this should be tested
+ // in a manner of `match_ergonomics_issue_9095` test.
+ // Pattern adjustment is part of RFC 2005-match-ergonomics.
+ // More info https://github.com/rust-lang/rust/issues/42640#issuecomment-313535089
+ let unadjusted_pat = self.lower_pattern_unadjusted(pat);
+ self.infer.pat_adjustments.get(&pat).map(|it| &**it).unwrap_or_default().iter().rev().fold(
+ unadjusted_pat,
+ |subpattern, ref_ty| Pat {
+ ty: ref_ty.clone(),
+ kind: Box::new(PatKind::Deref { subpattern }),
+ },
+ )
+ }
+
+ fn lower_pattern_unadjusted(&mut self, pat: PatId) -> Pat {
+ let mut ty = &self.infer[pat];
+ let variant = self.infer.variant_resolution_for_pat(pat);
+
+ let kind = match self.body[pat] {
+ hir_def::expr::Pat::Wild => PatKind::Wild,
+
+ hir_def::expr::Pat::Lit(expr) => self.lower_lit(expr),
+
+ hir_def::expr::Pat::Path(ref path) => {
+ return self.lower_path(pat, path);
+ }
+
+ hir_def::expr::Pat::Tuple { ref args, ellipsis } => {
+ let arity = match *ty.kind(Interner) {
+ TyKind::Tuple(arity, _) => arity,
+ _ => {
+ never!("unexpected type for tuple pattern: {:?}", ty);
+ self.errors.push(PatternError::UnexpectedType);
+ return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
+ }
+ };
+ let subpatterns = self.lower_tuple_subpats(args, arity, ellipsis);
+ PatKind::Leaf { subpatterns }
+ }
+
+ hir_def::expr::Pat::Bind { ref name, subpat, .. } => {
+ let bm = self.infer.pat_binding_modes[&pat];
+ match (bm, ty.kind(Interner)) {
+ (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
+ (BindingMode::Ref(_), _) => {
+ never!("`ref {}` has wrong type {:?}", name, ty);
+ self.errors.push(PatternError::UnexpectedType);
+ return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
+ }
+ _ => (),
+ }
+ PatKind::Binding { name: name.clone(), subpattern: self.lower_opt_pattern(subpat) }
+ }
+
+ hir_def::expr::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
+ let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len();
+ let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
+ self.lower_variant_or_leaf(pat, ty, subpatterns)
+ }
+
+ hir_def::expr::Pat::Record { ref args, .. } if variant.is_some() => {
+ let variant_data = variant.unwrap().variant_data(self.db.upcast());
+ let subpatterns = args
+ .iter()
+ .map(|field| {
+ // XXX(iDawer): field lookup is inefficient
+ variant_data.field(&field.name).map(|lfield_id| FieldPat {
+ field: lfield_id,
+ pattern: self.lower_pattern(field.pat),
+ })
+ })
+ .collect();
+ match subpatterns {
+ Some(subpatterns) => self.lower_variant_or_leaf(pat, ty, subpatterns),
+ None => {
+ self.errors.push(PatternError::MissingField);
+ PatKind::Wild
+ }
+ }
+ }
+ hir_def::expr::Pat::TupleStruct { .. } | hir_def::expr::Pat::Record { .. } => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ PatKind::Wild
+ }
+
+ hir_def::expr::Pat::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
+
+ _ => {
+ self.errors.push(PatternError::Unimplemented);
+ PatKind::Wild
+ }
+ };
+
+ Pat { ty: ty.clone(), kind: Box::new(kind) }
+ }
+
+ fn lower_tuple_subpats(
+ &mut self,
+ pats: &[PatId],
+ expected_len: usize,
+ ellipsis: Option<usize>,
+ ) -> Vec<FieldPat> {
+ if pats.len() > expected_len {
+ self.errors.push(PatternError::ExtraFields);
+ return Vec::new();
+ }
+
+ pats.iter()
+ .enumerate_and_adjust(expected_len, ellipsis)
+ .map(|(i, &subpattern)| FieldPat {
+ field: LocalFieldId::from_raw((i as u32).into()),
+ pattern: self.lower_pattern(subpattern),
+ })
+ .collect()
+ }
+
+ fn lower_patterns(&mut self, pats: &[PatId]) -> Vec<Pat> {
+ pats.iter().map(|&p| self.lower_pattern(p)).collect()
+ }
+
+ fn lower_opt_pattern(&mut self, pat: Option<PatId>) -> Option<Pat> {
+ pat.map(|p| self.lower_pattern(p))
+ }
+
+ fn lower_variant_or_leaf(
+ &mut self,
+ pat: PatId,
+ ty: &Ty,
+ subpatterns: Vec<FieldPat>,
+ ) -> PatKind {
+ let kind = match self.infer.variant_resolution_for_pat(pat) {
+ Some(variant_id) => {
+ if let VariantId::EnumVariantId(enum_variant) = variant_id {
+ let substs = match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => substs.clone(),
+ kind => {
+ always!(
+ matches!(kind, TyKind::FnDef(..) | TyKind::Error),
+ "inappropriate type for def: {:?}",
+ ty
+ );
+ self.errors.push(PatternError::UnexpectedType);
+ return PatKind::Wild;
+ }
+ };
+ PatKind::Variant { substs, enum_variant, subpatterns }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+ None => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ PatKind::Wild
+ }
+ };
+ kind
+ }
+
+ fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat {
+ let ty = &self.infer[pat];
+
+ let pat_from_kind = |kind| Pat { ty: ty.clone(), kind: Box::new(kind) };
+
+ match self.infer.variant_resolution_for_pat(pat) {
+ Some(_) => pat_from_kind(self.lower_variant_or_leaf(pat, ty, Vec::new())),
+ None => {
+ self.errors.push(PatternError::UnresolvedVariant);
+ pat_from_kind(PatKind::Wild)
+ }
+ }
+ }
+
+ fn lower_lit(&mut self, expr: hir_def::expr::ExprId) -> PatKind {
+ use hir_def::expr::{Expr, Literal::Bool};
+
+ match self.body[expr] {
+ Expr::Literal(Bool(value)) => PatKind::LiteralBool { value },
+ _ => {
+ self.errors.push(PatternError::Unimplemented);
+ PatKind::Wild
+ }
+ }
+ }
+}
+
+impl HirDisplay for Pat {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match &*self.kind {
+ PatKind::Wild => write!(f, "_"),
+ PatKind::Binding { name, subpattern } => {
+ write!(f, "{name}")?;
+ if let Some(subpattern) = subpattern {
+ write!(f, " @ ")?;
+ subpattern.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+ PatKind::Variant { subpatterns, .. } | PatKind::Leaf { subpatterns } => {
+ let variant = match *self.kind {
+ PatKind::Variant { enum_variant, .. } => Some(VariantId::from(enum_variant)),
+ _ => self.ty.as_adt().and_then(|(adt, _)| match adt {
+ AdtId::StructId(s) => Some(s.into()),
+ AdtId::UnionId(u) => Some(u.into()),
+ AdtId::EnumId(_) => None,
+ }),
+ };
+
+ if let Some(variant) = variant {
+ match variant {
+ VariantId::EnumVariantId(v) => {
+ let data = f.db.enum_data(v.parent);
+ write!(f, "{}", data.variants[v.local_id].name)?;
+ }
+ VariantId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ VariantId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name)?,
+ };
+
+ let variant_data = variant.variant_data(f.db.upcast());
+ if let VariantData::Record(rec_fields) = &*variant_data {
+ write!(f, " {{ ")?;
+
+ let mut printed = 0;
+ let subpats = subpatterns
+ .iter()
+ .filter(|p| !matches!(*p.pattern.kind, PatKind::Wild))
+ .map(|p| {
+ printed += 1;
+ WriteWith(move |f| {
+ write!(f, "{}: ", rec_fields[p.field].name)?;
+ p.pattern.hir_fmt(f)
+ })
+ });
+ f.write_joined(subpats, ", ")?;
+
+ if printed < rec_fields.len() {
+ write!(f, "{}..", if printed > 0 { ", " } else { "" })?;
+ }
+
+ return write!(f, " }}");
+ }
+ }
+
+ let num_fields = variant
+ .map_or(subpatterns.len(), |v| v.variant_data(f.db.upcast()).fields().len());
+ if num_fields != 0 || variant.is_none() {
+ write!(f, "(")?;
+ let subpats = (0..num_fields).map(|i| {
+ WriteWith(move |f| {
+ let fid = LocalFieldId::from_raw((i as u32).into());
+ if let Some(p) = subpatterns.get(i) {
+ if p.field == fid {
+ return p.pattern.hir_fmt(f);
+ }
+ }
+ if let Some(p) = subpatterns.iter().find(|p| p.field == fid) {
+ p.pattern.hir_fmt(f)
+ } else {
+ write!(f, "_")
+ }
+ })
+ });
+ f.write_joined(subpats, ", ")?;
+ if let (TyKind::Tuple(..), 1) = (self.ty.kind(Interner), num_fields) {
+ write!(f, ",")?;
+ }
+ write!(f, ")")?;
+ }
+
+ Ok(())
+ }
+ PatKind::Deref { subpattern } => {
+ match self.ty.kind(Interner) {
+ TyKind::Adt(adt, _) if is_box(adt.0, f.db) => write!(f, "box ")?,
+ &TyKind::Ref(mutbl, ..) => {
+ write!(f, "&{}", if mutbl == Mutability::Mut { "mut " } else { "" })?
+ }
+ _ => never!("{:?} is a bad Deref pattern type", self.ty),
+ }
+ subpattern.hir_fmt(f)
+ }
+ PatKind::LiteralBool { value } => write!(f, "{}", value),
+ PatKind::Or { pats } => f.write_joined(pats.iter(), " | "),
+ }
+ }
+}
+
+struct WriteWith<F>(F)
+where
+ F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>;
+
+impl<F> HirDisplay for WriteWith<F>
+where
+ F: Fn(&mut HirFormatter<'_>) -> Result<(), HirDisplayError>,
+{
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ (self.0)(f)
+ }
+}
+
+fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool {
+ let owned_box = name![owned_box].to_smol_str();
+ let krate = adt.module(db.upcast()).krate();
+ let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from);
+ Some(adt) == box_adt
+}
+
+pub(crate) trait PatternFoldable: Sized {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.super_fold_with(folder)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self;
+}
+
+pub(crate) trait PatternFolder: Sized {
+ fn fold_pattern(&mut self, pattern: &Pat) -> Pat {
+ pattern.super_fold_with(self)
+ }
+
+ fn fold_pattern_kind(&mut self, kind: &PatKind) -> PatKind {
+ kind.super_fold_with(self)
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Box<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ let content: T = (**self).fold_with(folder);
+ Box::new(content)
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Vec<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.iter().map(|t| t.fold_with(folder)).collect()
+ }
+}
+
+impl<T: PatternFoldable> PatternFoldable for Option<T> {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ self.as_ref().map(|t| t.fold_with(folder))
+ }
+}
+
+macro_rules! clone_impls {
+ ($($ty:ty),+) => {
+ $(
+ impl PatternFoldable for $ty {
+ fn super_fold_with<F: PatternFolder>(&self, _: &mut F) -> Self {
+ Clone::clone(self)
+ }
+ }
+ )+
+ }
+}
+
+clone_impls! { LocalFieldId, Ty, Substitution, EnumVariantId }
+
+impl PatternFoldable for FieldPat {
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
+ }
+}
+
+impl PatternFoldable for Pat {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern(self)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ Pat { ty: self.ty.fold_with(folder), kind: self.kind.fold_with(folder) }
+ }
+}
+
+impl PatternFoldable for PatKind {
+ fn fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern_kind(self)
+ }
+
+ fn super_fold_with<F: PatternFolder>(&self, folder: &mut F) -> Self {
+ match self {
+ PatKind::Wild => PatKind::Wild,
+ PatKind::Binding { name, subpattern } => {
+ PatKind::Binding { name: name.clone(), subpattern: subpattern.fold_with(folder) }
+ }
+ PatKind::Variant { substs, enum_variant, subpatterns } => PatKind::Variant {
+ substs: substs.fold_with(folder),
+ enum_variant: enum_variant.fold_with(folder),
+ subpatterns: subpatterns.fold_with(folder),
+ },
+ PatKind::Leaf { subpatterns } => {
+ PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
+ }
+ PatKind::Deref { subpattern } => {
+ PatKind::Deref { subpattern: subpattern.fold_with(folder) }
+ }
+ &PatKind::LiteralBool { value } => PatKind::LiteralBool { value },
+ PatKind::Or { pats } => PatKind::Or { pats: pats.fold_with(folder) },
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs
new file mode 100644
index 000000000..bbbe539c1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs
@@ -0,0 +1,1094 @@
+//! [`super::usefulness`] explains most of what is happening in this file. As explained there,
+//! values and patterns are made from constructors applied to fields. This file defines a
+//! `Constructor` enum, a `Fields` struct, and various operations to manipulate them and convert
+//! them from/to patterns.
+//!
+//! There's one idea that is not detailed in [`super::usefulness`] because the details are not
+//! needed there: _constructor splitting_.
+//!
+//! # Constructor splitting
+//!
+//! The idea is as follows: given a constructor `c` and a matrix, we want to specialize in turn
+//! with all the value constructors that are covered by `c`, and compute usefulness for each.
+//! Instead of listing all those constructors (which is intractable), we group those value
+//! constructors together as much as possible. Example:
+//!
+//! ```
+//! match (0, false) {
+//! (0 ..=100, true) => {} // `p_1`
+//! (50..=150, false) => {} // `p_2`
+//! (0 ..=200, _) => {} // `q`
+//! }
+//! ```
+//!
+//! The naive approach would try all numbers in the range `0..=200`. But we can be a lot more
+//! clever: `0` and `1` for example will match the exact same rows, and return equivalent
+//! witnesses. In fact all of `0..50` would. We can thus restrict our exploration to 4
+//! constructors: `0..50`, `50..=100`, `101..=150` and `151..=200`. That is enough and infinitely
+//! more tractable.
+//!
+//! We capture this idea in a function `split(p_1 ... p_n, c)` which returns a list of constructors
+//! `c'` covered by `c`. Given such a `c'`, we require that all value ctors `c''` covered by `c'`
+//! return an equivalent set of witnesses after specializing and computing usefulness.
+//! In the example above, witnesses for specializing by `c''` covered by `0..50` will only differ
+//! in their first element.
+//!
+//! We usually also ask that the `c'` together cover all of the original `c`. However we allow
+//! skipping some constructors as long as it doesn't change whether the resulting list of witnesses
+//! is empty or not. We use this in the wildcard `_` case.
+//!
+//! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for
+//! or-patterns; instead we just try the alternatives one-by-one. For details on splitting
+//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`].
+
+use std::{
+ cell::Cell,
+ cmp::{max, min},
+ iter::once,
+ ops::RangeInclusive,
+};
+
+use hir_def::{EnumVariantId, HasModule, LocalFieldId, VariantId};
+use smallvec::{smallvec, SmallVec};
+use stdx::never;
+
+use crate::{infer::normalize, AdtId, Interner, Scalar, Ty, TyExt, TyKind};
+
+use super::{
+ is_box,
+ usefulness::{helper::Captures, MatchCheckCtx, PatCtxt},
+ FieldPat, Pat, PatKind,
+};
+
+use self::Constructor::*;
+
+/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
+/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
+fn expand_or_pat(pat: &Pat) -> Vec<&Pat> {
+ // Depth-first walk: nested or-patterns are flattened into a single list of
+ // alternatives; any non-or pattern is a leaf and is pushed as-is.
+ fn expand<'p>(pat: &'p Pat, vec: &mut Vec<&'p Pat>) {
+ if let PatKind::Or { pats } = pat.kind.as_ref() {
+ for pat in pats {
+ expand(pat, vec);
+ }
+ } else {
+ vec.push(pat)
+ }
+ }
+
+ let mut pats = Vec::new();
+ expand(pat, &mut pats);
+ pats
+}
+
+/// [Constructor] uses this in unimplemented variants.
+/// It allows porting match expressions from upstream algorithm without losing semantics.
+/// Being uninhabited, matching on a `Void` value statically proves the arm is unreachable.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) enum Void {}
+
+/// An inclusive interval, used for precise integer exhaustiveness checking.
+/// `IntRange`s always store a contiguous range. This means that values are
+/// encoded such that `0` encodes the minimum value for the integer,
+/// regardless of the signedness.
+/// For example, the pattern `-128..=127i8` is encoded as `0..=255`.
+/// This makes comparisons and arithmetic on interval endpoints much more
+/// straightforward. See `signed_bias` for details.
+///
+/// `IntRange` is never used to encode an empty range or a "range" that wraps
+/// around the (offset) space: i.e., `range.lo <= range.hi`.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct IntRange {
+ range: RangeInclusive<u128>,
+}
+
+impl IntRange {
+ /// Whether `ty` is a type whose values this range representation can encode.
+ #[inline]
+ fn is_integral(ty: &Ty) -> bool {
+ matches!(
+ ty.kind(Interner),
+ TyKind::Scalar(Scalar::Char | Scalar::Int(_) | Scalar::Uint(_) | Scalar::Bool)
+ )
+ }
+
+ /// True when the range covers exactly one value.
+ fn is_singleton(&self) -> bool {
+ self.range.start() == self.range.end()
+ }
+
+ /// Returns the (inclusive) endpoints as a pair.
+ fn boundaries(&self) -> (u128, u128) {
+ (*self.range.start(), *self.range.end())
+ }
+
+ /// Singleton range for a bool literal: `false` -> `0..=0`, `true` -> `1..=1`.
+ #[inline]
+ fn from_bool(value: bool) -> IntRange {
+ let val = value as u128;
+ IntRange { range: val..=val }
+ }
+
+ /// Build a range for the given scalar type. Only `bool` is handled here;
+ /// other scalar kinds hit `unimplemented!` (ported incrementally from rustc).
+ #[inline]
+ fn from_range(lo: u128, hi: u128, scalar_ty: Scalar) -> IntRange {
+ match scalar_ty {
+ Scalar::Bool => IntRange { range: lo..=hi },
+ _ => unimplemented!(),
+ }
+ }
+
+ /// True when `self` is fully contained within `other`.
+ fn is_subrange(&self, other: &Self) -> bool {
+ other.range.start() <= self.range.start() && self.range.end() <= other.range.end()
+ }
+
+ /// Overlap of the two ranges, or `None` when they are disjoint.
+ fn intersection(&self, other: &Self) -> Option<Self> {
+ let (lo, hi) = self.boundaries();
+ let (other_lo, other_hi) = other.boundaries();
+ if lo <= other_hi && other_lo <= hi {
+ Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi) })
+ } else {
+ None
+ }
+ }
+
+ /// Convert this range back into a user-facing pattern. Only bool is
+ /// supported: `0..=0`/`1..=1` become literals, `0..=1` becomes `_`.
+ fn to_pat(&self, _cx: &MatchCheckCtx<'_, '_>, ty: Ty) -> Pat {
+ match ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Bool) => {
+ let kind = match self.boundaries() {
+ (0, 0) => PatKind::LiteralBool { value: false },
+ (1, 1) => PatKind::LiteralBool { value: true },
+ (0, 1) => PatKind::Wild,
+ (lo, hi) => {
+ never!("bad range for bool pattern: {}..={}", lo, hi);
+ PatKind::Wild
+ }
+ };
+ Pat { ty, kind: kind.into() }
+ }
+ _ => unimplemented!(),
+ }
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(&self, other: &Self) -> bool {
+ if self.intersection(other).is_some() {
+ // Constructor splitting should ensure that all intersections we encounter are actually
+ // inclusions.
+ assert!(self.is_subrange(other));
+ true
+ } else {
+ false
+ }
+ }
+}
+
+/// Represents a border between 2 integers. Because the intervals spanning borders must be able to
+/// cover every integer, we need to be able to represent 2^128 + 1 such borders.
+// The derived `Ord` follows declaration order, so every `JustBefore(_)` sorts before `AfterMax`,
+// and `JustBefore`s sort by their `u128` payload — exactly the order `SplitIntRange` relies on.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum IntBorder {
+ JustBefore(u128),
+ AfterMax,
+}
+
+/// A range of integers that is partitioned into disjoint subranges. This does constructor
+/// splitting for integer ranges as explained at the top of the file.
+///
+/// This is fed multiple ranges, and returns an output that covers the input, but is split so that
+/// the only intersections between an output range and a seen range are inclusions. No output range
+/// straddles the boundary of one of the inputs.
+///
+/// The following input:
+/// ```
+/// |-------------------------| // `self`
+/// |------| |----------| |----|
+/// |-------| |-------|
+/// ```
+/// would be iterated over as follows:
+/// ```
+/// ||---|--||-|---|---|---|--|
+/// ```
+#[derive(Debug, Clone)]
+struct SplitIntRange {
+ /// The range we are splitting
+ range: IntRange,
+ /// The borders of ranges we have seen. They are all contained within `range`. This is kept
+ /// sorted.
+ borders: Vec<IntBorder>,
+}
+
+impl SplitIntRange {
+ fn new(range: IntRange) -> Self {
+ SplitIntRange { range, borders: Vec::new() }
+ }
+
+ /// Internal use
+ // Convert an inclusive range into its two borders: just-before-lo and
+ // just-after-hi. `hi + 1` may overflow u128, hence the `AfterMax` sentinel.
+ fn to_borders(r: IntRange) -> [IntBorder; 2] {
+ use IntBorder::*;
+ let (lo, hi) = r.boundaries();
+ let lo = JustBefore(lo);
+ let hi = match hi.checked_add(1) {
+ Some(m) => JustBefore(m),
+ None => AfterMax,
+ };
+ [lo, hi]
+ }
+
+ /// Add ranges relative to which we split.
+ fn split(&mut self, ranges: impl Iterator<Item = IntRange>) {
+ let this_range = &self.range;
+ // Only the part of each seen range that overlaps `self.range` matters.
+ let included_ranges = ranges.filter_map(|r| this_range.intersection(&r));
+ let included_borders = included_ranges.flat_map(|r| {
+ let borders = Self::to_borders(r);
+ once(borders[0]).chain(once(borders[1]))
+ });
+ self.borders.extend(included_borders);
+ self.borders.sort_unstable();
+ }
+
+ /// Iterate over the contained ranges.
+ fn iter(&self) -> impl Iterator<Item = IntRange> + '_ {
+ use IntBorder::*;
+
+ let self_range = Self::to_borders(self.range.clone());
+ // Start with the start of the range.
+ let mut prev_border = self_range[0];
+ self.borders
+ .iter()
+ .copied()
+ // End with the end of the range.
+ .chain(once(self_range[1]))
+ // List pairs of adjacent borders.
+ .map(move |border| {
+ let ret = (prev_border, border);
+ prev_border = border;
+ ret
+ })
+ // Skip duplicates.
+ .filter(|(prev_border, border)| prev_border != border)
+ // Finally, convert to ranges.
+ .map(|(prev_border, border)| {
+ let range = match (prev_border, border) {
+ (JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1),
+ (JustBefore(n), AfterMax) => n..=u128::MAX,
+ _ => unreachable!(), // Ruled out by the sorting and filtering we did
+ };
+ IntRange { range }
+ })
+ }
+}
+
+/// A constructor for array and slice patterns.
+// Slice patterns are not yet ported; holding an uninhabited `Void` makes every
+// `Slice` value unconstructible, so the methods below are statically unreachable.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) struct Slice {
+ _unimplemented: Void,
+}
+
+impl Slice {
+ fn arity(self) -> usize {
+ match self._unimplemented {}
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(self, _other: Self) -> bool {
+ match self._unimplemented {}
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// the constructor. See also `Fields`.
+///
+/// `pat_constructor` retrieves the constructor corresponding to a pattern.
+/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
+/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
+/// `Fields`.
+// `dead_code` allowed because some variants (e.g. `FloatRange`, `Str`) carry the
+// uninhabited `Void` and thus can never be constructed in this port yet.
+#[allow(dead_code)]
+#[derive(Clone, Debug, PartialEq)]
+pub(super) enum Constructor {
+ /// The constructor for patterns that have a single constructor, like tuples, struct patterns
+ /// and fixed-length arrays.
+ Single,
+ /// Enum variants.
+ Variant(EnumVariantId),
+ /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
+ IntRange(IntRange),
+ /// Ranges of floating-point literal values (`2.0..=5.2`).
+ FloatRange(Void),
+ /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
+ Str(Void),
+ /// Array and slice patterns.
+ Slice(Slice),
+ /// Constants that must not be matched structurally. They are treated as black
+ /// boxes for the purposes of exhaustiveness: we must not inspect them, and they
+ /// don't count towards making a match exhaustive.
+ Opaque,
+ /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
+ /// for those types for which we cannot list constructors explicitly, like `f64` and `str`.
+ NonExhaustive,
+ /// Stands for constructors that are not seen in the matrix, as explained in the documentation
+ /// for [`SplitWildcard`]. The carried `bool` is used for the `non_exhaustive_omitted_patterns`
+ /// lint.
+ Missing { nonexhaustive_enum_missing_real_variants: bool },
+ /// Wildcard pattern.
+ Wildcard,
+ /// Or-pattern.
+ Or,
+}
+
+impl Constructor {
+ pub(super) fn is_wildcard(&self) -> bool {
+ matches!(self, Wildcard)
+ }
+
+ pub(super) fn is_non_exhaustive(&self) -> bool {
+ matches!(self, NonExhaustive)
+ }
+
+ /// The contained range, if this is an `IntRange` constructor.
+ fn as_int_range(&self) -> Option<&IntRange> {
+ match self {
+ IntRange(range) => Some(range),
+ _ => None,
+ }
+ }
+
+ /// The contained slice, if this is a `Slice` constructor.
+ fn as_slice(&self) -> Option<Slice> {
+ match self {
+ Slice(slice) => Some(*slice),
+ _ => None,
+ }
+ }
+
+ pub(super) fn is_unstable_variant(&self, _pcx: PatCtxt<'_, '_>) -> bool {
+ false //FIXME: implement this
+ }
+
+ pub(super) fn is_doc_hidden_variant(&self, _pcx: PatCtxt<'_, '_>) -> bool {
+ false //FIXME: implement this
+ }
+
+ /// Resolve this constructor to the struct/union/enum-variant it names within
+ /// `adt`. Panics on constructors that don't correspond to an ADT variant.
+ fn variant_id_for_adt(&self, adt: hir_def::AdtId) -> VariantId {
+ match *self {
+ Variant(id) => id.into(),
+ // `Single` only applies to structs and unions; enums always go
+ // through `Variant`.
+ Single => {
+ assert!(!matches!(adt, hir_def::AdtId::EnumId(_)));
+ match adt {
+ hir_def::AdtId::EnumId(_) => unreachable!(),
+ hir_def::AdtId::StructId(id) => id.into(),
+ hir_def::AdtId::UnionId(id) => id.into(),
+ }
+ }
+ _ => panic!("bad constructor {:?} for adt {:?}", self, adt),
+ }
+ }
+
+ /// The number of fields for this constructor. This must be kept in sync with
+ /// `Fields::wildcards`.
+ pub(super) fn arity(&self, pcx: PatCtxt<'_, '_>) -> usize {
+ match self {
+ Single | Variant(_) => match *pcx.ty.kind(Interner) {
+ TyKind::Tuple(arity, ..) => arity,
+ TyKind::Ref(..) => 1,
+ TyKind::Adt(adt, ..) => {
+ if is_box(adt.0, pcx.cx.db) {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ 1
+ } else {
+ let variant = self.variant_id_for_adt(adt.0);
+ Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
+ }
+ }
+ _ => {
+ never!("Unexpected type for `Single` constructor: {:?}", pcx.ty);
+ 0
+ }
+ },
+ Slice(slice) => slice.arity(),
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => 0,
+ Or => {
+ never!("The `Or` constructor doesn't have a fixed arity");
+ 0
+ }
+ }
+ }
+
+ /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of actual
+ /// constructors (like variants, integers or fixed-sized slices). When specializing for these
+ /// constructors, we want to be specialising for the actual underlying constructors.
+ /// Naively, we would simply return the list of constructors they correspond to. We instead are
+ /// more clever: if there are constructors that we know will behave the same wrt the current
+ /// matrix, we keep them grouped. For example, all slices of a sufficiently large length
+ /// will either be all useful or all non-useful with a given matrix.
+ ///
+ /// See the branches for details on how the splitting is done.
+ ///
+ /// This function may discard some irrelevant constructors if this preserves behavior and
+ /// diagnostics. Eg. for the `_` case, we ignore the constructors already present in the
+ /// matrix, unless all of them are.
+ pub(super) fn split<'a>(
+ &self,
+ pcx: PatCtxt<'_, '_>,
+ ctors: impl Iterator<Item = &'a Constructor> + Clone,
+ ) -> SmallVec<[Self; 1]> {
+ match self {
+ Wildcard => {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, ctors);
+ split_wildcard.into_ctors(pcx)
+ }
+ // Fast-track if the range is trivial. In particular, we don't do the overlapping
+ // ranges check.
+ IntRange(ctor_range) if !ctor_range.is_singleton() => {
+ let mut split_range = SplitIntRange::new(ctor_range.clone());
+ let int_ranges = ctors.filter_map(|ctor| ctor.as_int_range());
+ split_range.split(int_ranges.cloned());
+ // `IntRange` here is the `Constructor::IntRange` variant constructor.
+ split_range.iter().map(IntRange).collect()
+ }
+ Slice(slice) => match slice._unimplemented {},
+ // Any other constructor can be used unchanged.
+ _ => smallvec![self.clone()],
+ }
+ }
+
+ /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
+ /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
+ /// this checks for inclusion.
+ // We inline because this has a single call site in `Matrix::specialize_constructor`.
+ #[inline]
+ pub(super) fn is_covered_by(&self, _pcx: PatCtxt<'_, '_>, other: &Self) -> bool {
+ // This must be kept in sync with `is_covered_by_any`.
+ // NOTE: arm order matters — e.g. `(_, Wildcard)` must be tried before
+ // `(Missing | Wildcard, _)`.
+ match (self, other) {
+ // Wildcards cover anything
+ (_, Wildcard) => true,
+ // The missing ctors are not covered by anything in the matrix except wildcards.
+ (Missing { .. } | Wildcard, _) => false,
+
+ (Single, Single) => true,
+ (Variant(self_id), Variant(other_id)) => self_id == other_id,
+
+ (IntRange(self_range), IntRange(other_range)) => self_range.is_covered_by(other_range),
+ (FloatRange(void), FloatRange(..)) => match *void {},
+ (Str(void), Str(..)) => match *void {},
+ (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
+
+ // We are trying to inspect an opaque constant. Thus we skip the row.
+ (Opaque, _) | (_, Opaque) => false,
+ // Only a wildcard pattern can match the special extra constructor.
+ (NonExhaustive, _) => false,
+
+ _ => {
+ never!("trying to compare incompatible constructors {:?} and {:?}", self, other);
+ // Continue with 'whatever is covered' supposed to result in false no-error diagnostic.
+ true
+ }
+ }
+ }
+
+ /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is
+ /// assumed to be built from `matrix.head_ctors()` with wildcards filtered out, and `self` is
+ /// assumed to have been split from a wildcard.
+ fn is_covered_by_any(&self, _pcx: PatCtxt<'_, '_>, used_ctors: &[Constructor]) -> bool {
+ if used_ctors.is_empty() {
+ return false;
+ }
+
+ // This must be kept in sync with `is_covered_by`.
+ match self {
+ // If `self` is `Single`, `used_ctors` cannot contain anything else than `Single`s.
+ Single => !used_ctors.is_empty(),
+ Variant(_) => used_ctors.iter().any(|c| c == self),
+ IntRange(range) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_int_range())
+ .any(|other| range.is_covered_by(other)),
+ Slice(slice) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_slice())
+ .any(|other| slice.is_covered_by(other)),
+ // This constructor is never covered by anything else
+ NonExhaustive => false,
+ Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => {
+ never!("found unexpected ctor in all_ctors: {:?}", self);
+ true
+ }
+ }
+ }
+}
+
+/// A wildcard constructor that we split relative to the constructors in the matrix, as explained
+/// at the top of the file.
+///
+/// A constructor that is not present in the matrix rows will only be covered by the rows that have
+/// wildcards. Thus we can group all of those constructors together; we call them "missing
+/// constructors". Splitting a wildcard would therefore list all present constructors individually
+/// (or grouped if they are integers or slices), and then all missing constructors together as a
+/// group.
+///
+/// However we can go further: since any constructor will match the wildcard rows, and having more
+/// rows can only reduce the amount of usefulness witnesses, we can skip the present constructors
+/// and only try the missing ones.
+/// This will not preserve the whole list of witnesses, but will preserve whether the list is empty
+/// or not. In fact this is quite natural from the point of view of diagnostics too. This is done
+/// in `to_ctors`: in some cases we only return `Missing`.
+#[derive(Debug)]
+pub(super) struct SplitWildcard {
+ /// Constructors seen in the matrix.
+ matrix_ctors: Vec<Constructor>,
+ /// All the constructors for this type
+ all_ctors: SmallVec<[Constructor; 1]>,
+}
+
+impl SplitWildcard {
+ /// Enumerate every possible constructor for `pcx.ty` (the "full set" that
+ /// `split` will later partition into seen vs. missing).
+ pub(super) fn new(pcx: PatCtxt<'_, '_>) -> Self {
+ let cx = pcx.cx;
+ let make_range = |start, end, scalar| IntRange(IntRange::from_range(start, end, scalar));
+
+ // Unhandled types are treated as non-exhaustive. Being explicit here instead of falling
+ // to catchall arm to ease further implementation.
+ let unhandled = || smallvec![NonExhaustive];
+
+ // This determines the set of all possible constructors for the type `pcx.ty`. For numbers,
+ // arrays and slices we use ranges and variable-length slices when appropriate.
+ //
+ // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
+ // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
+ // returned list of constructors.
+ // Invariant: this is empty if and only if the type is uninhabited (as determined by
+ // `cx.is_uninhabited()`).
+ let all_ctors = match pcx.ty.kind(Interner) {
+ TyKind::Scalar(Scalar::Bool) => smallvec![make_range(0, 1, Scalar::Bool)],
+ // TyKind::Array(..) if ... => unhandled(),
+ TyKind::Array(..) | TyKind::Slice(..) => unhandled(),
+ &TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ..) => {
+ let enum_data = cx.db.enum_data(enum_id);
+
+ // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an
+ // additional "unknown" constructor.
+ // There is no point in enumerating all possible variants, because the user can't
+ // actually match against them all themselves. So we always return only the fictitious
+ // constructor.
+ // E.g., in an example like:
+ //
+ // ```
+ // let err: io::ErrorKind = ...;
+ // match err {
+ // io::ErrorKind::NotFound => {},
+ // }
+ // ```
+ //
+ // we don't want to show every possible IO error, but instead have only `_` as the
+ // witness.
+ let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty);
+
+ let is_exhaustive_pat_feature = cx.feature_exhaustive_patterns();
+
+ // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it
+ // as though it had an "unknown" constructor to avoid exposing its emptiness. The
+ // exception is if the pattern is at the top level, because we want empty matches to be
+ // considered exhaustive.
+ let is_secretly_empty = enum_data.variants.is_empty()
+ && !is_exhaustive_pat_feature
+ && !pcx.is_top_level;
+
+ let mut ctors: SmallVec<[_; 1]> = enum_data
+ .variants
+ .iter()
+ .filter(|&(_, _v)| {
+ // If `exhaustive_patterns` is enabled, we exclude variants known to be
+ // uninhabited.
+ let is_uninhabited = is_exhaustive_pat_feature
+ && unimplemented!("after MatchCheckCtx.feature_exhaustive_patterns()");
+ !is_uninhabited
+ })
+ .map(|(local_id, _)| Variant(EnumVariantId { parent: enum_id, local_id }))
+ .collect();
+
+ if is_secretly_empty || is_declared_nonexhaustive {
+ ctors.push(NonExhaustive);
+ }
+ ctors
+ }
+ TyKind::Scalar(Scalar::Char) => unhandled(),
+ TyKind::Scalar(Scalar::Int(..) | Scalar::Uint(..)) => unhandled(),
+ TyKind::Never if !cx.feature_exhaustive_patterns() && !pcx.is_top_level => {
+ smallvec![NonExhaustive]
+ }
+ TyKind::Never => SmallVec::new(),
+ _ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
+ TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
+ // This type is one for which we cannot list constructors, like `str` or `f64`.
+ _ => smallvec![NonExhaustive],
+ };
+
+ SplitWildcard { matrix_ctors: Vec::new(), all_ctors }
+ }
+
+ /// Pass a set of constructors relative to which to split this one. Don't call twice, it won't
+ /// do what you want.
+ pub(super) fn split<'a>(
+ &mut self,
+ pcx: PatCtxt<'_, '_>,
+ ctors: impl Iterator<Item = &'a Constructor> + Clone,
+ ) {
+ // Since `all_ctors` never contains wildcards, this won't recurse further.
+ self.all_ctors =
+ self.all_ctors.iter().flat_map(|ctor| ctor.split(pcx, ctors.clone())).collect();
+ self.matrix_ctors = ctors.filter(|c| !c.is_wildcard()).cloned().collect();
+ }
+
+ /// Whether there are any value constructors for this type that are not present in the matrix.
+ fn any_missing(&self, pcx: PatCtxt<'_, '_>) -> bool {
+ self.iter_missing(pcx).next().is_some()
+ }
+
+ /// Iterate over the constructors for this type that are not present in the matrix.
+ pub(super) fn iter_missing<'a, 'p>(
+ &'a self,
+ pcx: PatCtxt<'a, 'p>,
+ ) -> impl Iterator<Item = &'a Constructor> + Captures<'p> {
+ self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.matrix_ctors))
+ }
+
+ /// Return the set of constructors resulting from splitting the wildcard. As explained at the
+ /// top of the file, if any constructors are missing we can ignore the present ones.
+ fn into_ctors(self, pcx: PatCtxt<'_, '_>) -> SmallVec<[Constructor; 1]> {
+ if self.any_missing(pcx) {
+ // Some constructors are missing, thus we can specialize with the special `Missing`
+ // constructor, which stands for those constructors that are not seen in the matrix,
+ // and matches the same rows as any of them (namely the wildcard rows). See the top of
+ // the file for details.
+ // However, when all constructors are missing we can also specialize with the full
+ // `Wildcard` constructor. The difference will depend on what we want in diagnostics.
+
+ // If some constructors are missing, we typically want to report those constructors,
+ // e.g.:
+ // ```
+ // enum Direction { N, S, E, W }
+ // let Direction::N = ...;
+ // ```
+ // we can report 3 witnesses: `S`, `E`, and `W`.
+ //
+ // However, if the user didn't actually specify a constructor
+ // in this arm, e.g., in
+ // ```
+ // let x: (Direction, Direction, bool) = ...;
+ // let (_, _, false) = x;
+ // ```
+ // we don't want to show all 16 possible witnesses `(<direction-1>, <direction-2>,
+ // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we
+ // prefer to report just a wildcard `_`.
+ //
+ // The exception is: if we are at the top-level, for example in an empty match, we
+ // sometimes prefer reporting the list of constructors instead of just `_`.
+ let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty);
+ let ctor = if !self.matrix_ctors.is_empty() || report_when_all_missing {
+ if pcx.is_non_exhaustive {
+ Missing {
+ nonexhaustive_enum_missing_real_variants: self
+ .iter_missing(pcx)
+ .any(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx))),
+ }
+ } else {
+ Missing { nonexhaustive_enum_missing_real_variants: false }
+ }
+ } else {
+ Wildcard
+ };
+ return smallvec![ctor];
+ }
+
+ // All the constructors are present in the matrix, so we just go through them all.
+ self.all_ctors
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// those fields, generalized to allow patterns in each field. See also `Constructor`.
+///
+/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that
+/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then
+/// given a pattern we fill some of the fields with its subpatterns.
+/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in
+/// `extract_pattern_arguments` we fill some of the entries, and the result is
+/// `[Some(0), _, _, _]`.
+/// ```rust
+/// let x: [Option<u8>; 4] = foo();
+/// match x {
+/// [Some(0), ..] => {}
+/// }
+/// ```
+///
+/// Note that the number of fields of a constructor may not match the fields declared in the
+/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited,
+/// because the code mustn't observe that it is uninhabited. In that case that field is not
+/// included in `fields`. For that reason, when you have a `mir::Field` you must use
+/// `index_with_declared_idx`.
+// `Copy` works because `fields` is just a shared slice into the pattern arena.
+#[derive(Clone, Copy)]
+pub(super) struct Fields<'p> {
+ fields: &'p [DeconstructedPat<'p>],
+}
+
+impl<'p> Fields<'p> {
+ /// A field list with no fields (used for nullary constructors).
+ fn empty() -> Self {
+ Fields { fields: &[] }
+ }
+
+ /// A field list with exactly one field, allocated in the pattern arena.
+ fn singleton(cx: &MatchCheckCtx<'_, 'p>, field: DeconstructedPat<'p>) -> Self {
+ let field = cx.pattern_arena.alloc(field);
+ Fields { fields: std::slice::from_ref(field) }
+ }
+
+ /// Arena-allocate the given patterns as this constructor's field list.
+ pub(super) fn from_iter(
+ cx: &MatchCheckCtx<'_, 'p>,
+ fields: impl IntoIterator<Item = DeconstructedPat<'p>>,
+ ) -> Self {
+ let fields: &[_] = cx.pattern_arena.alloc_extend(fields);
+ Fields { fields }
+ }
+
+ /// One wildcard pattern per field type.
+ fn wildcards_from_tys(cx: &MatchCheckCtx<'_, 'p>, tys: impl IntoIterator<Item = Ty>) -> Self {
+ Fields::from_iter(cx, tys.into_iter().map(DeconstructedPat::wildcard))
+ }
+
+ // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
+ // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
+ // This lists the fields we keep along with their types.
+ fn list_variant_nonhidden_fields<'a>(
+ cx: &'a MatchCheckCtx<'a, 'p>,
+ ty: &'a Ty,
+ variant: VariantId,
+ ) -> impl Iterator<Item = (LocalFieldId, Ty)> + Captures<'a> + Captures<'p> {
+ let (adt, substs) = ty.as_adt().unwrap();
+
+ let adt_is_local = variant.module(cx.db.upcast()).krate() == cx.module.krate();
+ // Whether we must not match the fields of this variant exhaustively.
+ let is_non_exhaustive = is_field_list_non_exhaustive(variant, cx) && !adt_is_local;
+
+ let visibility = cx.db.field_visibilities(variant);
+ let field_ty = cx.db.field_types(variant);
+ let fields_len = variant.variant_data(cx.db.upcast()).fields().len() as u32;
+
+ (0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).filter_map(move |fid| {
+ // Substitute generics and normalize to get the concrete field type.
+ let ty = field_ty[fid].clone().substitute(Interner, substs);
+ let ty = normalize(cx.db, cx.body, ty);
+ // Enum variant fields are always "visible" for matching purposes.
+ let is_visible = matches!(adt, hir_def::AdtId::EnumId(..))
+ || visibility[fid].is_visible_from(cx.db.upcast(), cx.module);
+ let is_uninhabited = cx.is_uninhabited(&ty);
+
+ if is_uninhabited && (!is_visible || is_non_exhaustive) {
+ None
+ } else {
+ Some((fid, ty))
+ }
+ })
+ }
+
+ /// Creates a new list of wildcard fields for a given constructor. The result must have a
+ /// length of `constructor.arity()`.
+ pub(crate) fn wildcards(
+ cx: &MatchCheckCtx<'_, 'p>,
+ ty: &Ty,
+ constructor: &Constructor,
+ ) -> Self {
+ let ret = match constructor {
+ Single | Variant(_) => match ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ let tys = substs.iter(Interner).map(|ty| ty.assert_ty_ref(Interner));
+ Fields::wildcards_from_tys(cx, tys.cloned())
+ }
+ TyKind::Ref(.., rty) => Fields::wildcards_from_tys(cx, once(rty.clone())),
+ &TyKind::Adt(AdtId(adt), ref substs) => {
+ if is_box(adt, cx.db) {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ let subst_ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ Fields::wildcards_from_tys(cx, once(subst_ty))
+ } else {
+ let variant = constructor.variant_id_for_adt(adt);
+ let tys = Fields::list_variant_nonhidden_fields(cx, ty, variant)
+ .map(|(_, ty)| ty);
+ Fields::wildcards_from_tys(cx, tys)
+ }
+ }
+ ty_kind => {
+ never!("Unexpected type for `Single` constructor: {:?}", ty_kind);
+ Fields::wildcards_from_tys(cx, once(ty.clone()))
+ }
+ },
+ Slice(slice) => match slice._unimplemented {},
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => Fields::empty(),
+ Or => {
+ never!("called `Fields::wildcards` on an `Or` ctor");
+ Fields::empty()
+ }
+ };
+ ret
+ }
+
+ /// Returns the list of patterns.
+ pub(super) fn iter_patterns<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Captures<'a> {
+ self.fields.iter()
+ }
+}
+
+/// Values and patterns can be represented as a constructor applied to some fields. This represents
+/// a pattern in this form.
+/// This also keeps track of whether the pattern has been found reachable during analysis. For this
+/// reason we should be careful not to clone patterns for which we care about that. Use
+/// `clone_and_forget_reachability` if you're sure.
+pub(crate) struct DeconstructedPat<'p> {
+ // Head constructor of the pattern.
+ ctor: Constructor,
+ // Arena-allocated subpatterns, one per constructor field.
+ fields: Fields<'p>,
+ ty: Ty,
+ // Interior-mutable so the usefulness algorithm can mark reachability through `&self`.
+ reachable: Cell<bool>,
+}
+
+impl<'p> DeconstructedPat<'p> {
+ pub(super) fn wildcard(ty: Ty) -> Self {
+ Self::new(Wildcard, Fields::empty(), ty)
+ }
+
+ pub(super) fn new(ctor: Constructor, fields: Fields<'p>, ty: Ty) -> Self {
+ DeconstructedPat { ctor, fields, ty, reachable: Cell::new(false) }
+ }
+
+ /// Construct a pattern that matches everything that starts with this constructor.
+ /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
+ /// `Some(_)`.
+ pub(super) fn wild_from_ctor(pcx: PatCtxt<'_, 'p>, ctor: Constructor) -> Self {
+ let fields = Fields::wildcards(pcx.cx, pcx.ty, &ctor);
+ DeconstructedPat::new(ctor, fields, pcx.ty.clone())
+ }
+
+ /// Clone this value. This method emphasizes that cloning loses reachability information and
+ /// should be done carefully.
+ pub(super) fn clone_and_forget_reachability(&self) -> Self {
+ DeconstructedPat::new(self.ctor.clone(), self.fields, self.ty.clone())
+ }
+
+ pub(crate) fn from_pat(cx: &MatchCheckCtx<'_, 'p>, pat: &Pat) -> Self {
+ let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
+ let ctor;
+ let fields;
+ match pat.kind.as_ref() {
+ PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
+ PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
+ ctor = Wildcard;
+ fields = Fields::empty();
+ }
+ PatKind::Deref { subpattern } => {
+ ctor = Single;
+ fields = Fields::singleton(cx, mkpat(subpattern));
+ }
+ PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+ match pat.ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ ctor = Single;
+ let mut wilds: SmallVec<[_; 2]> = substs
+ .iter(Interner)
+ .map(|arg| arg.assert_ty_ref(Interner).clone())
+ .map(DeconstructedPat::wildcard)
+ .collect();
+ for pat in subpatterns {
+ let idx: u32 = pat.field.into_raw().into();
+ wilds[idx as usize] = mkpat(&pat.pattern);
+ }
+ fields = Fields::from_iter(cx, wilds)
+ }
+ TyKind::Adt(adt, substs) if is_box(adt.0, cx.db) => {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
+ // _)` or a box pattern. As a hack to avoid an ICE with the former, we
+ // ignore other fields than the first one. This will trigger an error later
+ // anyway.
+ // See https://github.com/rust-lang/rust/issues/82772 ,
+ // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
+ // The problem is that we can't know from the type whether we'll match
+ // normally or through box-patterns. We'll have to figure out a proper
+ // solution when we introduce generalized deref patterns. Also need to
+ // prevent mixing of those two options.
+ let pat =
+ subpatterns.iter().find(|pat| pat.field.into_raw() == 0u32.into());
+ let field = if let Some(pat) = pat {
+ mkpat(&pat.pattern)
+ } else {
+ let ty = substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ DeconstructedPat::wildcard(ty)
+ };
+ ctor = Single;
+ fields = Fields::singleton(cx, field)
+ }
+ &TyKind::Adt(adt, _) => {
+ ctor = match pat.kind.as_ref() {
+ PatKind::Leaf { .. } => Single,
+ PatKind::Variant { enum_variant, .. } => Variant(*enum_variant),
+ _ => {
+ never!();
+ Wildcard
+ }
+ };
+ let variant = ctor.variant_id_for_adt(adt.0);
+ let fields_len = variant.variant_data(cx.db.upcast()).fields().len();
+ // For each field in the variant, we store the relevant index into `self.fields` if any.
+ let mut field_id_to_id: Vec<Option<usize>> = vec![None; fields_len];
+ let tys = Fields::list_variant_nonhidden_fields(cx, &pat.ty, variant)
+ .enumerate()
+ .map(|(i, (fid, ty))| {
+ let field_idx: u32 = fid.into_raw().into();
+ field_id_to_id[field_idx as usize] = Some(i);
+ ty
+ });
+ let mut wilds: SmallVec<[_; 2]> =
+ tys.map(DeconstructedPat::wildcard).collect();
+ for pat in subpatterns {
+ let field_idx: u32 = pat.field.into_raw().into();
+ if let Some(i) = field_id_to_id[field_idx as usize] {
+ wilds[i] = mkpat(&pat.pattern);
+ }
+ }
+ fields = Fields::from_iter(cx, wilds);
+ }
+ _ => {
+ never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
+ ctor = Wildcard;
+ fields = Fields::empty();
+ }
+ }
+ }
+ &PatKind::LiteralBool { value } => {
+ ctor = IntRange(IntRange::from_bool(value));
+ fields = Fields::empty();
+ }
+ PatKind::Or { .. } => {
+ ctor = Or;
+ let pats: SmallVec<[_; 2]> = expand_or_pat(pat).into_iter().map(mkpat).collect();
+ fields = Fields::from_iter(cx, pats)
+ }
+ }
+ DeconstructedPat::new(ctor, fields, pat.ty.clone())
+ }
+
+ pub(crate) fn to_pat(&self, cx: &MatchCheckCtx<'_, 'p>) -> Pat {
+ let mut subpatterns = self.iter_fields().map(|p| p.to_pat(cx));
+ let pat = match &self.ctor {
+ Single | Variant(_) => match self.ty.kind(Interner) {
+ TyKind::Tuple(..) => PatKind::Leaf {
+ subpatterns: subpatterns
+ .zip(0u32..)
+ .map(|(p, i)| FieldPat {
+ field: LocalFieldId::from_raw(i.into()),
+ pattern: p,
+ })
+ .collect(),
+ },
+ TyKind::Adt(adt, _) if is_box(adt.0, cx.db) => {
+ // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+ // of `std`). So this branch is only reachable when the feature is enabled and
+ // the pattern is a box pattern.
+ PatKind::Deref { subpattern: subpatterns.next().unwrap() }
+ }
+ TyKind::Adt(adt, substs) => {
+ let variant = self.ctor.variant_id_for_adt(adt.0);
+ let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty(), variant)
+ .zip(subpatterns)
+ .map(|((field, _ty), pattern)| FieldPat { field, pattern })
+ .collect();
+
+ if let VariantId::EnumVariantId(enum_variant) = variant {
+ PatKind::Variant { substs: substs.clone(), enum_variant, subpatterns }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+ // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+ // be careful to reconstruct the correct constant pattern here. However a string
+ // literal pattern will never be reported as a non-exhaustiveness witness, so we
+ // ignore this issue.
+ TyKind::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
+ _ => {
+ never!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty);
+ PatKind::Wild
+ }
+ },
+ &Slice(slice) => match slice._unimplemented {},
+ &Str(void) => match void {},
+ &FloatRange(void) => match void {},
+ IntRange(range) => return range.to_pat(cx, self.ty.clone()),
+ Wildcard | NonExhaustive => PatKind::Wild,
+ Missing { .. } => {
+ never!(
+ "trying to convert a `Missing` constructor into a `Pat`; this is a bug, \
+ `Missing` should have been processed in `apply_constructors`"
+ );
+ PatKind::Wild
+ }
+ Opaque | Or => {
+ never!("can't convert to pattern: {:?}", self.ctor);
+ PatKind::Wild
+ }
+ };
+ Pat { ty: self.ty.clone(), kind: Box::new(pat) }
+ }
+
+ pub(super) fn is_or_pat(&self) -> bool {
+ matches!(self.ctor, Or)
+ }
+
+ pub(super) fn ctor(&self) -> &Constructor {
+ &self.ctor
+ }
+
+ pub(super) fn ty(&self) -> &Ty {
+ &self.ty
+ }
+
+ pub(super) fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + 'a {
+ self.fields.iter_patterns()
+ }
+
+ /// Specialize this pattern with a constructor.
+ /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
+ pub(super) fn specialize<'a>(
+ &'a self,
+ cx: &MatchCheckCtx<'_, 'p>,
+ other_ctor: &Constructor,
+ ) -> SmallVec<[&'p DeconstructedPat<'p>; 2]> {
+ match (&self.ctor, other_ctor) {
+ (Wildcard, _) => {
+ // We return a wildcard for each field of `other_ctor`.
+ Fields::wildcards(cx, &self.ty, other_ctor).iter_patterns().collect()
+ }
+ (Slice(self_slice), Slice(other_slice))
+ if self_slice.arity() != other_slice.arity() =>
+ {
+ match self_slice._unimplemented {}
+ }
+ _ => self.fields.iter_patterns().collect(),
+ }
+ }
+
+ /// We keep track for each pattern if it was ever reachable during the analysis. This is used
+ /// with `unreachable_spans` to report unreachable subpatterns arising from or patterns.
+ pub(super) fn set_reachable(&self) {
+ self.reachable.set(true)
+ }
+ pub(super) fn is_reachable(&self) -> bool {
+ self.reachable.get()
+ }
+}
+
+fn is_field_list_non_exhaustive(variant_id: VariantId, cx: &MatchCheckCtx<'_, '_>) -> bool {
+ let attr_def_id = match variant_id {
+ VariantId::EnumVariantId(id) => id.into(),
+ VariantId::StructId(id) => id.into(),
+ VariantId::UnionId(id) => id.into(),
+ };
+ cx.db.attrs(attr_def_id).by_key("non_exhaustive").exists()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs
new file mode 100644
index 000000000..b89b4f2bf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_util.rs
@@ -0,0 +1,56 @@
+//! Pattern utilities.
+//!
+//! Originates from `rustc_hir::pat_util`
+
+use std::iter::{Enumerate, ExactSizeIterator};
+
+pub(crate) struct EnumerateAndAdjust<I> {
+ enumerate: Enumerate<I>,
+ gap_pos: usize,
+ gap_len: usize,
+}
+
+impl<I> Iterator for EnumerateAndAdjust<I>
+where
+ I: Iterator,
+{
+ type Item = (usize, <I as Iterator>::Item);
+
+ fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
+ self.enumerate
+ .next()
+ .map(|(i, elem)| (if i < self.gap_pos { i } else { i + self.gap_len }, elem))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.enumerate.size_hint()
+ }
+}
+
+pub(crate) trait EnumerateAndAdjustIterator {
+ fn enumerate_and_adjust(
+ self,
+ expected_len: usize,
+ gap_pos: Option<usize>,
+ ) -> EnumerateAndAdjust<Self>
+ where
+ Self: Sized;
+}
+
+impl<T: ExactSizeIterator> EnumerateAndAdjustIterator for T {
+ fn enumerate_and_adjust(
+ self,
+ expected_len: usize,
+ gap_pos: Option<usize>,
+ ) -> EnumerateAndAdjust<Self>
+ where
+ Self: Sized,
+ {
+ let actual_len = self.len();
+ EnumerateAndAdjust {
+ enumerate: self.enumerate(),
+ gap_pos: gap_pos.unwrap_or(expected_len),
+ gap_len: expected_len - actual_len,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs
new file mode 100644
index 000000000..1221327b9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/usefulness.rs
@@ -0,0 +1,811 @@
+//! Based on rust-lang/rust (last sync f31622a50 2021-11-12)
+//! <https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs>
+//!
+//! -----
+//!
+//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
+//! Specifically, given a list of patterns for a type, we can tell whether:
+//! (a) each pattern is reachable (reachability)
+//! (b) the patterns cover every possible value for the type (exhaustiveness)
+//!
+//! The algorithm implemented here is a modified version of the one described in [this
+//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
+//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
+//! without being as rigorous.
+//!
+//!
+//! # Summary
+//!
+//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
+//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
+//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
+//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
+//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
+//! file is to compute it efficiently.
+//!
+//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
+//! is useful w.r.t. the patterns above it:
+//! ```rust
+//! match x {
+//! Some(_) => ...,
+//! None => ..., // reachable: `None` is matched by this but not the branch above
+//! Some(0) => ..., // unreachable: all the values this matches are already matched by
+//! // `Some(_)` above
+//! }
+//! ```
+//!
+//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
+//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
+//! are used to tell the user which values are missing.
+//! ```rust
+//! match x {
+//! Some(0) => ...,
+//! None => ...,
+//! // not exhaustive: `_` is useful because it matches `Some(1)`
+//! }
+//! ```
+//!
+//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
+//! reachability for each match branch and exhaustiveness for the whole match.
+//!
+//!
+//! # Constructors and fields
+//!
+//! Note: we will often abbreviate "constructor" as "ctor".
+//!
+//! The idea that powers everything that is done in this file is the following: a (matcheable)
+//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
+//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
+//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
+//! pattern-matching, and this is the basis for what follows.
+//!
+//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
+//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
+//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
+//! `enum`, with one variant for each number. This allows us to see any matchable value as made up
+//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
+//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
+//!
+//! This idea can be extended to patterns: they are also made from constructors applied to fields.
+//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
+//! call "value constructors"), but there are also pattern-only ctors. The most important one is
+//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
+//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
+//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
+//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
+//!
+//! From this deconstruction we can compute whether a given value matches a given pattern; we
+//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
+//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
+//! we compare their fields recursively. A few representative examples:
+//!
+//! - `matches!(v, _) := true`
+//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
+//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
+//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
+//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
+//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
+//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
+//!
+//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
+//!
+//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
+//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
+//! infinitude of constructors. There are also subtleties with visibility of fields and
+//! uninhabitedness and various other things. The constructors idea can be extended to handle most
+//! of these subtleties though; caveats are documented where relevant throughout the code.
+//!
+//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
+//!
+//!
+//! # Specialization
+//!
+//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
+//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
+//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
+//! enumerate all possible values. From the discussion above we see that we can proceed
+//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
+//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
+//! say from knowing only the first constructor of our candidate value.
+//!
+//! Let's take the following example:
+//! ```
+//! match x {
+//! Enum::Variant1(_) => {} // `p1`
+//! Enum::Variant2(None, 0) => {} // `p2`
+//! Enum::Variant2(Some(_), 0) => {} // `q`
+//! }
+//! ```
+//!
+//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
+//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
+//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
+//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
+//!
+//! ```
+//! match x {
+//! (None, 0) => {} // `p2'`
+//! (Some(_), 0) => {} // `q'`
+//! }
+//! ```
+//!
+//! This motivates a new step in computing usefulness, that we call _specialization_.
+//! Specialization consists of filtering a list of patterns for those that match a constructor, and
+//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
+//!
+//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
+//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
+//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
+//! like a stack. We note a pattern-stack simply with `[p_1 ... p_n]`.
+//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
+//! happening:
+//! ```
+//! [Enum::Variant1(_)]
+//! [Enum::Variant2(None, 0)]
+//! [Enum::Variant2(Some(_), 0)]
+//! //==>> specialize with `Variant2`
+//! [None, 0]
+//! [Some(_), 0]
+//! //==>> specialize with `Some`
+//! [_, 0]
+//! //==>> specialize with `true` (say the type was `bool`)
+//! [0]
+//! //==>> specialize with `0`
+//! []
+//! ```
+//!
+//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
+//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
+//! otherwise if returns the fields of the constructor. This only returns more than one
+//! pattern-stack if `p` has a pattern-only constructor.
+//!
+//! - Specializing for the wrong constructor returns nothing
+//!
+//! `specialize(None, Some(p0)) := []`
+//!
+//! - Specializing for the correct constructor returns a single row with the fields
+//!
+//! `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
+//!
+//! `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
+//!
+//! - For or-patterns, we specialize each branch and concatenate the results
+//!
+//! `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
+//!
+//! - We treat the other pattern constructors as if they were a large or-pattern of all the
+//! possibilities:
+//!
+//! `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
+//!
+//! `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
+//!
+//! `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
+//!
+//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
+//! the discussion about constructor splitting in [`super::deconstruct_pat`].
+//!
+//!
+//! We then extend this function to work with pattern-stacks as input, by acting on the first
+//! column and keeping the other columns untouched.
+//!
+//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
+//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
+//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
+//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
+//! [`Fields`] struct.
+//!
+//!
+//! # Computing usefulness
+//!
+//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
+//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern_stack `q`. The paper and this
+//! file calls the list of patstacks a _matrix_. They must all have the same number of columns and
+//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
+//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
+//!
+//! - base case: `n_columns == 0`.
+//! Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
+//! unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
+//!
+//! - inductive case: `n_columns > 0`.
+//! We need a way to list the constructors we want to try. We will be more clever in the next
+//! section but for now assume we list all value constructors for the type of the first column.
+//!
+//! - for each such ctor `c`:
+//!
+//! - for each `q'` returned by `specialize(c, q)`:
+//!
+//! - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
+//!
+//! - for each witness found, we revert specialization by pushing the constructor `c` on top.
+//!
+//! - We return the concatenation of all the witnesses found, if any.
+//!
+//! Example:
+//! ```
+//! [Some(true)] // p_1
+//! [None] // p_2
+//! [Some(_)] // q
+//! //==>> try `None`: `specialize(None, q)` returns nothing
+//! //==>> try `Some`: `specialize(Some, q)` returns a single row
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `true`: `specialize(true, q')` returns a single row
+//! [] // p_1''
+//! [] // q''
+//! //==>> base case; `n != 0` so `q''` is not useful.
+//! //==>> go back up a step
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `false`: `specialize(false, q')` returns a single row
+//! [] // q''
+//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
+//! witnesses:
+//! []
+//! //==>> undo the specialization with `false`
+//! witnesses:
+//! [false]
+//! //==>> undo the specialization with `Some`
+//! witnesses:
+//! [Some(false)]
+//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
+//! ```
+//!
+//! This computation is done in [`is_useful`]. In practice we don't care about the list of
+//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
+//! witnesses when computing exhaustiveness to report them to the user.
+//!
+//!
+//! # Making usefulness tractable: constructor splitting
+//!
+//! We're missing one last detail: which constructors do we list? Naively listing all value
+//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
+//! first obvious insight is that we only want to list constructors that are covered by the head
+//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
+//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
+//! group together constructors that behave the same.
+//!
+//! The details are not necessary to understand this file, so we explain them in
+//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
+
+use std::iter::once;
+
+use hir_def::{AdtId, DefWithBodyId, HasModule, ModuleId};
+use smallvec::{smallvec, SmallVec};
+use typed_arena::Arena;
+
+use crate::{db::HirDatabase, Ty, TyExt};
+
+use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};
+
+use self::{helper::Captures, ArmType::*, Usefulness::*};
+
+pub(crate) struct MatchCheckCtx<'a, 'p> {
+ pub(crate) module: ModuleId,
+ pub(crate) body: DefWithBodyId,
+ pub(crate) db: &'a dyn HirDatabase,
+ /// Lowered patterns from arms plus generated by the check.
+ pub(crate) pattern_arena: &'p Arena<DeconstructedPat<'p>>,
+}
+
+impl<'a, 'p> MatchCheckCtx<'a, 'p> {
+ pub(super) fn is_uninhabited(&self, _ty: &Ty) -> bool {
+ // FIXME(iDawer) implement exhaustive_patterns feature. More info in:
+ // Tracking issue for RFC 1872: exhaustive_patterns feature https://github.com/rust-lang/rust/issues/51085
+ false
+ }
+
+ /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
+ pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: &Ty) -> bool {
+ match ty.as_adt() {
+ Some((adt @ AdtId::EnumId(_), _)) => {
+ let has_non_exhaustive_attr =
+ self.db.attrs(adt.into()).by_key("non_exhaustive").exists();
+ let is_local = adt.module(self.db.upcast()).krate() == self.module.krate();
+ has_non_exhaustive_attr && !is_local
+ }
+ _ => false,
+ }
+ }
+
+ // Rust feature described as "Allows exhaustive pattern matching on types that contain uninhabited types."
+ pub(super) fn feature_exhaustive_patterns(&self) -> bool {
+ // FIXME see MatchCheckCtx::is_uninhabited
+ false
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(super) struct PatCtxt<'a, 'p> {
+ pub(super) cx: &'a MatchCheckCtx<'a, 'p>,
+ /// Type of the current column under investigation.
+ pub(super) ty: &'a Ty,
+ /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
+ /// subpattern.
+ pub(super) is_top_level: bool,
+ /// Whether the current pattern is from a `non_exhaustive` enum.
+ pub(super) is_non_exhaustive: bool,
+}
+
+/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]`
+/// works well.
+#[derive(Clone)]
+pub(super) struct PatStack<'p> {
+ pats: SmallVec<[&'p DeconstructedPat<'p>; 2]>,
+}
+
+impl<'p> PatStack<'p> {
+ fn from_pattern(pat: &'p DeconstructedPat<'p>) -> Self {
+ Self::from_vec(smallvec![pat])
+ }
+
+ fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p>; 2]>) -> Self {
+ PatStack { pats: vec }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.pats.is_empty()
+ }
+
+ fn len(&self) -> usize {
+ self.pats.len()
+ }
+
+ fn head(&self) -> &'p DeconstructedPat<'p> {
+ self.pats[0]
+ }
+
+ // Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
+ // or-pattern. Panics if `self` is empty.
+ fn expand_or_pat(&self) -> impl Iterator<Item = PatStack<'p>> + Captures<'_> {
+ self.head().iter_fields().map(move |pat| {
+ let mut new_patstack = PatStack::from_pattern(pat);
+ new_patstack.pats.extend_from_slice(&self.pats[1..]);
+ new_patstack
+ })
+ }
+
+ /// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
+ ///
+ /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
+ /// fields filled with wild patterns.
+ ///
+ /// This is roughly the inverse of `Constructor::apply`.
+ fn pop_head_constructor(&self, cx: &MatchCheckCtx<'_, 'p>, ctor: &Constructor) -> PatStack<'p> {
+ // We pop the head pattern and push the new fields extracted from the arguments of
+ // `self.head()`.
+ let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(cx, ctor);
+ new_fields.extend_from_slice(&self.pats[1..]);
+ PatStack::from_vec(new_fields)
+ }
+}
+
+/// A 2D matrix.
+#[derive(Clone)]
+pub(super) struct Matrix<'p> {
+ patterns: Vec<PatStack<'p>>,
+}
+
+impl<'p> Matrix<'p> {
+ fn empty() -> Self {
+ Matrix { patterns: vec![] }
+ }
+
+    /// Number of columns of this matrix. `None` if the matrix is empty.
+ pub(super) fn _column_count(&self) -> Option<usize> {
+ self.patterns.get(0).map(|r| r.len())
+ }
+
+ /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
+ /// expands it.
+ fn push(&mut self, row: PatStack<'p>) {
+ if !row.is_empty() && row.head().is_or_pat() {
+ self.patterns.extend(row.expand_or_pat());
+ } else {
+ self.patterns.push(row);
+ }
+ }
+
+ /// Iterate over the first component of each row
+ fn heads(&self) -> impl Iterator<Item = &'p DeconstructedPat<'p>> + Clone + Captures<'_> {
+ self.patterns.iter().map(|r| r.head())
+ }
+
+ /// This computes `S(constructor, self)`. See top of the file for explanations.
+ fn specialize_constructor(&self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Matrix<'p> {
+ let mut matrix = Matrix::empty();
+ for row in &self.patterns {
+ if ctor.is_covered_by(pcx, row.head().ctor()) {
+ let new_row = row.pop_head_constructor(pcx.cx, ctor);
+ matrix.push(new_row);
+ }
+ }
+ matrix
+ }
+}
+
+/// This carries the results of computing usefulness, as described at the top of the file. When
+/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
+/// of potential unreachable sub-patterns (in the presence of or-patterns). When checking
+/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
+/// witnesses of non-exhaustiveness when there are any.
+/// Which variant to use is dictated by `ArmType`.
+enum Usefulness<'p> {
+ /// If we don't care about witnesses, simply remember if the pattern was useful.
+ NoWitnesses { useful: bool },
+ /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
+ /// pattern is unreachable.
+ WithWitnesses(Vec<Witness<'p>>),
+}
+
+impl<'p> Usefulness<'p> {
+ fn new_useful(preference: ArmType) -> Self {
+ match preference {
+ // A single (empty) witness of reachability.
+ FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
+ RealArm => NoWitnesses { useful: true },
+ }
+ }
+ fn new_not_useful(preference: ArmType) -> Self {
+ match preference {
+ FakeExtraWildcard => WithWitnesses(vec![]),
+ RealArm => NoWitnesses { useful: false },
+ }
+ }
+
+ fn is_useful(&self) -> bool {
+ match self {
+ Usefulness::NoWitnesses { useful } => *useful,
+ Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
+ }
+ }
+
+ /// Combine usefulnesses from two branches. This is an associative operation.
+ fn extend(&mut self, other: Self) {
+ match (&mut *self, other) {
+ (WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
+ (WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
+ (WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
+ (NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
+ *s_useful = *s_useful || o_useful
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ /// After calculating usefulness after a specialization, call this to reconstruct a usefulness
+ /// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
+ /// with the results of specializing with the other constructors.
+ fn apply_constructor(
+ self,
+ pcx: PatCtxt<'_, 'p>,
+ matrix: &Matrix<'p>,
+ ctor: &Constructor,
+ ) -> Self {
+ match self {
+ NoWitnesses { .. } => self,
+ WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
+ WithWitnesses(witnesses) => {
+ let new_witnesses = if let Constructor::Missing { .. } = ctor {
+ // We got the special `Missing` constructor, so each of the missing constructors
+ // gives a new pattern that is not caught by the match. We list those patterns.
+ let new_patterns = if pcx.is_non_exhaustive {
+ // Here we don't want the user to try to list all variants, we want them to add
+ // a wildcard, so we only suggest that.
+ vec![DeconstructedPat::wildcard(pcx.ty.clone())]
+ } else {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+
+ // This lets us know if we skipped any variants because they are marked
+                        // `doc(hidden)` or they are behind an unstable feature gate (only stdlib types).
+ let mut hide_variant_show_wild = false;
+ // Construct for each missing constructor a "wild" version of this
+ // constructor, that matches everything that can be built with
+ // it. For example, if `ctor` is a `Constructor::Variant` for
+ // `Option::Some`, we get the pattern `Some(_)`.
+ let mut new: Vec<DeconstructedPat<'_>> = split_wildcard
+ .iter_missing(pcx)
+ .filter_map(|missing_ctor| {
+ // Check if this variant is marked `doc(hidden)`
+ if missing_ctor.is_doc_hidden_variant(pcx)
+ || missing_ctor.is_unstable_variant(pcx)
+ {
+ hide_variant_show_wild = true;
+ return None;
+ }
+ Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone()))
+ })
+ .collect();
+
+ if hide_variant_show_wild {
+ new.push(DeconstructedPat::wildcard(pcx.ty.clone()))
+ }
+
+ new
+ };
+
+ witnesses
+ .into_iter()
+ .flat_map(|witness| {
+ new_patterns.iter().map(move |pat| {
+ Witness(
+ witness
+ .0
+ .iter()
+ .chain(once(pat))
+ .map(DeconstructedPat::clone_and_forget_reachability)
+ .collect(),
+ )
+ })
+ })
+ .collect()
+ } else {
+ witnesses
+ .into_iter()
+ .map(|witness| witness.apply_constructor(pcx, ctor))
+ .collect()
+ };
+ WithWitnesses(new_witnesses)
+ }
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+enum ArmType {
+ FakeExtraWildcard,
+ RealArm,
+}
+
+/// A witness of non-exhaustiveness for error reporting, represented
+/// as a list of patterns (in reverse order of construction) with
+/// wildcards inside to represent elements that can take any inhabitant
+/// of the type as a value.
+///
+/// A witness against a list of patterns should have the same types
+/// and length as the pattern matched against. Because Rust `match`
+/// is always against a single pattern, at the end the witness will
+/// have length 1, but in the middle of the algorithm, it can contain
+/// multiple patterns.
+///
+/// For example, if we are constructing a witness for the match against
+///
+/// ```
+/// struct Pair(Option<(u32, u32)>, bool);
+///
+/// match (p: Pair) {
+/// Pair(None, _) => {}
+/// Pair(_, false) => {}
+/// }
+/// ```
+///
+/// We'll perform the following steps:
+/// 1. Start with an empty witness
+/// `Witness(vec![])`
+/// 2. Push a witness `true` against the `false`
+/// `Witness(vec![true])`
+/// 3. Push a witness `Some(_)` against the `None`
+/// `Witness(vec![true, Some(_)])`
+/// 4. Apply the `Pair` constructor to the witnesses
+/// `Witness(vec![Pair(Some(_), true)])`
+///
+/// The final `Pair(Some(_), true)` is then the resulting witness.
+pub(crate) struct Witness<'p>(Vec<DeconstructedPat<'p>>);
+
+impl<'p> Witness<'p> {
+ /// Asserts that the witness contains a single pattern, and returns it.
+ fn single_pattern(self) -> DeconstructedPat<'p> {
+ assert_eq!(self.0.len(), 1);
+ self.0.into_iter().next().unwrap()
+ }
+
+ /// Constructs a partial witness for a pattern given a list of
+ /// patterns expanded by the specialization step.
+ ///
+ /// When a pattern P is discovered to be useful, this function is used bottom-up
+ /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
+ /// of values, V, where each value in that set is not covered by any previously
+ /// used patterns and is covered by the pattern P'. Examples:
+ ///
+ /// left_ty: tuple of 3 elements
+ /// pats: [10, 20, _] => (10, 20, _)
+ ///
+ /// left_ty: struct X { a: (bool, &'static str), b: usize}
+ /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 }
+ fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p>, ctor: &Constructor) -> Self {
+ let pat = {
+ let len = self.0.len();
+ let arity = ctor.arity(pcx);
+ let pats = self.0.drain((len - arity)..).rev();
+ let fields = Fields::from_iter(pcx.cx, pats);
+ DeconstructedPat::new(ctor.clone(), fields, pcx.ty.clone())
+ };
+
+ self.0.push(pat);
+
+ self
+ }
+}
+
+/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
+/// The algorithm from the paper has been modified to correctly handle empty
+/// types. The changes are:
+/// (0) We don't exit early if the pattern matrix has zero rows. We just
+/// continue to recurse over columns.
+/// (1) all_constructors will only return constructors that are statically
+/// possible. E.g., it will only return `Ok` for `Result<T, !>`.
+///
+/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
+/// to a set of such vectors `m` - this is defined as there being a set of
+/// inputs that will match `v` but not any of the sets in `m`.
+///
+/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
+///
+/// This is used both for reachability checking (if a pattern isn't useful in
+/// relation to preceding patterns, it is not reachable) and exhaustiveness
+/// checking (if a wildcard pattern is useful in relation to a matrix, the
+/// matrix isn't exhaustive).
+///
+/// `is_under_guard` is used to inform if the pattern has a guard. If it
+/// has one it must not be inserted into the matrix. This shouldn't be
+/// relied on for soundness.
+fn is_useful<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ matrix: &Matrix<'p>,
+ v: &PatStack<'p>,
+ witness_preference: ArmType,
+ is_under_guard: bool,
+ is_top_level: bool,
+) -> Usefulness<'p> {
+ let Matrix { patterns: rows, .. } = matrix;
+
+ // The base case. We are pattern-matching on () and the return value is
+ // based on whether our matrix has a row or not.
+ // NOTE: This could potentially be optimized by checking rows.is_empty()
+ // first and then, if v is non-empty, the return value is based on whether
+ // the type of the tuple we're checking is inhabited or not.
+ if v.is_empty() {
+ let ret = if rows.is_empty() {
+ Usefulness::new_useful(witness_preference)
+ } else {
+ Usefulness::new_not_useful(witness_preference)
+ };
+ return ret;
+ }
+
+ debug_assert!(rows.iter().all(|r| r.len() == v.len()));
+
+ let ty = v.head().ty();
+ let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+ let pcx = PatCtxt { cx, ty, is_top_level, is_non_exhaustive };
+
+ // If the first pattern is an or-pattern, expand it.
+ let mut ret = Usefulness::new_not_useful(witness_preference);
+ if v.head().is_or_pat() {
+ // We try each or-pattern branch in turn.
+ let mut matrix = matrix.clone();
+ for v in v.expand_or_pat() {
+ let usefulness = is_useful(cx, &matrix, &v, witness_preference, is_under_guard, false);
+ ret.extend(usefulness);
+ // If pattern has a guard don't add it to the matrix.
+ if !is_under_guard {
+ // We push the already-seen patterns into the matrix in order to detect redundant
+ // branches like `Some(_) | Some(0)`.
+ matrix.push(v);
+ }
+ }
+ } else {
+ let v_ctor = v.head().ctor();
+
+ // FIXME: implement `overlapping_range_endpoints` lint
+
+ // We split the head constructor of `v`.
+ let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+ // For each constructor, we compute whether there's a value that starts with it that would
+ // witness the usefulness of `v`.
+ let start_matrix = matrix;
+ for ctor in split_ctors {
+ // We cache the result of `Fields::wildcards` because it is used a lot.
+ let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
+ let v = v.pop_head_constructor(cx, &ctor);
+ let usefulness =
+ is_useful(cx, &spec_matrix, &v, witness_preference, is_under_guard, false);
+ let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);
+
+ // FIXME: implement `non_exhaustive_omitted_patterns` lint
+
+ ret.extend(usefulness);
+ }
+ };
+
+ if ret.is_useful() {
+ v.head().set_reachable();
+ }
+
+ ret
+}
+
+/// The arm of a match expression.
+#[derive(Clone, Copy)]
+pub(crate) struct MatchArm<'p> {
+ pub(crate) pat: &'p DeconstructedPat<'p>,
+ pub(crate) has_guard: bool,
+}
+
+/// Indicates whether or not a given arm is reachable.
+#[derive(Clone, Debug)]
+pub(crate) enum Reachability {
+ /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
+ /// found to be unreachable despite the overall arm being reachable. Used only in the presence
+ /// of or-patterns, otherwise it stays empty.
+    // FIXME: store unreachable subpattern IDs
+ Reachable,
+ /// The arm is unreachable.
+ Unreachable,
+}
+
+/// The output of checking a match for exhaustiveness and arm reachability.
+pub(crate) struct UsefulnessReport<'p> {
+ /// For each arm of the input, whether that arm is reachable after the arms above it.
+ pub(crate) _arm_usefulness: Vec<(MatchArm<'p>, Reachability)>,
+ /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
+ /// exhaustiveness.
+ pub(crate) non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p>>,
+}
+
+/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
+/// of its arms are reachable.
+///
+/// Note: the input patterns must have been lowered through
+/// `check_match::MatchVisitor::lower_pattern`.
+pub(crate) fn compute_match_usefulness<'p>(
+ cx: &MatchCheckCtx<'_, 'p>,
+ arms: &[MatchArm<'p>],
+ scrut_ty: &Ty,
+) -> UsefulnessReport<'p> {
+ let mut matrix = Matrix::empty();
+ let arm_usefulness = arms
+ .iter()
+ .copied()
+ .map(|arm| {
+ let v = PatStack::from_pattern(arm.pat);
+ is_useful(cx, &matrix, &v, RealArm, arm.has_guard, true);
+ if !arm.has_guard {
+ matrix.push(v);
+ }
+ let reachability = if arm.pat.is_reachable() {
+ Reachability::Reachable
+ } else {
+ Reachability::Unreachable
+ };
+ (arm, reachability)
+ })
+ .collect();
+
+ let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty.clone()));
+ let v = PatStack::from_pattern(wild_pattern);
+ let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, false, true);
+ let non_exhaustiveness_witnesses = match usefulness {
+ WithWitnesses(pats) => pats.into_iter().map(Witness::single_pattern).collect(),
+ NoWitnesses { .. } => panic!("bug"),
+ };
+ UsefulnessReport { _arm_usefulness: arm_usefulness, non_exhaustiveness_witnesses }
+}
+
+pub(crate) mod helper {
+ // Copy-pasted from rust/compiler/rustc_data_structures/src/captures.rs
+ /// "Signaling" trait used in impl trait to tag lifetimes that you may
+ /// need to capture but don't really need for other reasons.
+ /// Basically a workaround; see [this comment] for details.
+ ///
+ /// [this comment]: https://github.com/rust-lang/rust/issues/34511#issuecomment-373423999
+ // FIXME(eddyb) false positive, the lifetime parameter is "phantom" but needed.
+ #[allow(unused_lifetimes)]
+ pub(crate) trait Captures<'a> {}
+
+ impl<'a, T: ?Sized> Captures<'a> for T {}
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
new file mode 100644
index 000000000..161b19a73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -0,0 +1,104 @@
+//! Provides validations for unsafe code. Currently checks if unsafe functions are missing
+//! unsafe blocks.
+
+use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, UnaryOp},
+ resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ DefWithBodyId,
+};
+
+use crate::{
+ db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TyExt, TyKind,
+};
+
+pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
+ let infer = db.infer(def);
+ let mut res = Vec::new();
+
+ let is_unsafe = match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
+ DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
+ };
+ if is_unsafe {
+ return res;
+ }
+
+ let body = db.body(def);
+ unsafe_expressions(db, &infer, def, &body, body.body_expr, &mut |expr| {
+ if !expr.inside_unsafe_block {
+ res.push(expr.expr);
+ }
+ });
+
+ res
+}
+
+pub struct UnsafeExpr {
+ pub expr: ExprId,
+ pub inside_unsafe_block: bool,
+}
+
+// FIXME: Move this out, its not a diagnostic only thing anymore, and handle unsafe pattern accesses as well
+pub fn unsafe_expressions(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ def: DefWithBodyId,
+ body: &Body,
+ current: ExprId,
+ unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
+) {
+ walk_unsafe(db, infer, def, body, current, false, unsafe_expr_cb)
+}
+
+fn walk_unsafe(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ def: DefWithBodyId,
+ body: &Body,
+ current: ExprId,
+ inside_unsafe_block: bool,
+ unsafe_expr_cb: &mut dyn FnMut(UnsafeExpr),
+) {
+ let expr = &body.exprs[current];
+ match expr {
+ &Expr::Call { callee, .. } => {
+ if let Some(func) = infer[callee].as_fn_def(db) {
+ if is_fn_unsafe_to_call(db, func) {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ }
+ Expr::Path(path) => {
+ let resolver = resolver_for_expr(db.upcast(), def, current);
+ let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
+ if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
+ if db.static_data(id).mutable {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ }
+ Expr::MethodCall { .. } => {
+ if infer
+ .method_resolution(current)
+ .map(|(func, _)| is_fn_unsafe_to_call(db, func))
+ .unwrap_or(false)
+ {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+ if let TyKind::Raw(..) = &infer[*expr].kind(Interner) {
+ unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ Expr::Unsafe { body: child } => {
+ return walk_unsafe(db, infer, def, body, *child, true, unsafe_expr_cb);
+ }
+ _ => {}
+ }
+
+ expr.walk_child_exprs(|child| {
+ walk_unsafe(db, infer, def, body, child, inside_unsafe_block, unsafe_expr_cb);
+ });
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
new file mode 100644
index 000000000..d2f9c2b8b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -0,0 +1,1315 @@
+//! The `HirDisplay` trait, which serves two purposes: Turning various bits from
+//! HIR back into source code, and just displaying them for debugging/testing
+//! purposes.
+
+use std::fmt::{self, Debug};
+
+use base_db::CrateId;
+use chalk_ir::BoundVar;
+use hir_def::{
+ body,
+ db::DefDatabase,
+ find_path,
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ intern::{Internable, Interned},
+ item_scope::ItemInNs,
+ path::{Path, PathKind},
+ type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
+ visibility::Visibility,
+ HasModule, ItemContainerId, Lookup, ModuleId, TraitId,
+};
+use hir_expand::{hygiene::Hygiene, name::Name};
+use itertools::Itertools;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase,
+ from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
+ mapping::from_chalk,
+ primitive, subst_prefix, to_assoc_type_id,
+ utils::{self, generics},
+ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
+ GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
+ OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, Substitution, TraitRef,
+ TraitRefExt, Ty, TyExt, TyKind, WhereClause,
+};
+
+pub struct HirFormatter<'a> {
+ pub db: &'a dyn HirDatabase,
+ fmt: &'a mut dyn fmt::Write,
+ buf: String,
+ curr_size: usize,
+ pub(crate) max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+}
+
+pub trait HirDisplay {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError>;
+
+ /// Returns a `Display`able type that is human-readable.
+ fn into_displayable<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+ ) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ assert!(
+ !matches!(display_target, DisplayTarget::SourceCode { .. }),
+ "HirDisplayWrapper cannot fail with DisplaySourceCodeError, use HirDisplay::hir_fmt directly instead"
+ );
+ HirDisplayWrapper { db, t: self, max_size, omit_verbose_types, display_target }
+ }
+
+ /// Returns a `Display`able type that is human-readable.
+ /// Use this for showing types to the user (e.g. diagnostics)
+ fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::Diagnostics,
+ }
+ }
+
+ /// Returns a `Display`able type that is human-readable and tries to be succinct.
+ /// Use this for showing types to the user where space is constrained (e.g. doc popups)
+ fn display_truncated<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ max_size: Option<usize>,
+ ) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size,
+ omit_verbose_types: true,
+ display_target: DisplayTarget::Diagnostics,
+ }
+ }
+
+ /// Returns a String representation of `self` that can be inserted into the given module.
+ /// Use this when generating code (e.g. assists)
+ fn display_source_code<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ module_id: ModuleId,
+ ) -> Result<String, DisplaySourceCodeError> {
+ let mut result = String::new();
+ match self.hir_fmt(&mut HirFormatter {
+ db,
+ fmt: &mut result,
+ buf: String::with_capacity(20),
+ curr_size: 0,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::SourceCode { module_id },
+ }) {
+ Ok(()) => {}
+ Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
+ Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
+ };
+ Ok(result)
+ }
+
+ /// Returns a String representation of `self` for test purposes
+ fn display_test<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::Test,
+ }
+ }
+}
+
+impl<'a> HirFormatter<'a> {
+ pub fn write_joined<T: HirDisplay>(
+ &mut self,
+ iter: impl IntoIterator<Item = T>,
+ sep: &str,
+ ) -> Result<(), HirDisplayError> {
+ let mut first = true;
+ for e in iter {
+ if !first {
+ write!(self, "{}", sep)?;
+ }
+ first = false;
+
+ // Abbreviate multiple omitted types with a single ellipsis.
+ if self.should_truncate() {
+ return write!(self, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ e.hir_fmt(self)?;
+ }
+ Ok(())
+ }
+
+ /// This allows using the `write!` macro directly with a `HirFormatter`.
+ pub fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> Result<(), HirDisplayError> {
+ // We write to a buffer first to track output size
+ self.buf.clear();
+ fmt::write(&mut self.buf, args)?;
+ self.curr_size += self.buf.len();
+
+ // Then we write to the internal formatter from the buffer
+ self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
+ }
+
+ pub fn write_str(&mut self, s: &str) -> Result<(), HirDisplayError> {
+ self.fmt.write_str(s)?;
+ Ok(())
+ }
+
+ pub fn write_char(&mut self, c: char) -> Result<(), HirDisplayError> {
+ self.fmt.write_char(c)?;
+ Ok(())
+ }
+
+ pub fn should_truncate(&self) -> bool {
+ match self.max_size {
+ Some(max_size) => self.curr_size >= max_size,
+ None => false,
+ }
+ }
+
+ pub fn omit_verbose_types(&self) -> bool {
+ self.omit_verbose_types
+ }
+}
+
+#[derive(Clone, Copy)]
+pub enum DisplayTarget {
+ /// Display types for inlays, doc popups, autocompletion, etc...
+ /// Showing `{unknown}` or not qualifying paths is fine here.
+ /// There's no reason for this to fail.
+ Diagnostics,
+ /// Display types for inserting them in source files.
+ /// The generated code should compile, so paths need to be qualified.
+ SourceCode { module_id: ModuleId },
+ /// Only for test purpose to keep real types
+ Test,
+}
+
+impl DisplayTarget {
+ fn is_source_code(&self) -> bool {
+ matches!(self, Self::SourceCode { .. })
+ }
+ fn is_test(&self) -> bool {
+ matches!(self, Self::Test)
+ }
+}
+
+#[derive(Debug)]
+pub enum DisplaySourceCodeError {
+ PathNotFound,
+ UnknownType,
+ Closure,
+}
+
+pub enum HirDisplayError {
+ /// Errors that can occur when generating source code
+ DisplaySourceCodeError(DisplaySourceCodeError),
+ /// `FmtError` is required to be compatible with std::fmt::Display
+ FmtError,
+}
+impl From<fmt::Error> for HirDisplayError {
+ fn from(_: fmt::Error) -> Self {
+ Self::FmtError
+ }
+}
+
+pub struct HirDisplayWrapper<'a, T> {
+ db: &'a dyn HirDatabase,
+ t: &'a T,
+ max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+}
+
+impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
+where
+ T: HirDisplay,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.t.hir_fmt(&mut HirFormatter {
+ db: self.db,
+ fmt: f,
+ buf: String::with_capacity(20),
+ curr_size: 0,
+ max_size: self.max_size,
+ omit_verbose_types: self.omit_verbose_types,
+ display_target: self.display_target,
+ }) {
+ Ok(()) => Ok(()),
+ Err(HirDisplayError::FmtError) => Err(fmt::Error),
+ Err(HirDisplayError::DisplaySourceCodeError(_)) => {
+ // This should never happen
+ panic!("HirDisplay::hir_fmt failed with DisplaySourceCodeError when calling Display::fmt!")
+ }
+ }
+ }
+}
+
+const TYPE_HINT_TRUNCATION: &str = "…";
+
+impl<T: HirDisplay> HirDisplay for &'_ T {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ HirDisplay::hir_fmt(*self, f)
+ }
+}
+
+impl<T: HirDisplay + Internable> HirDisplay for Interned<T> {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ HirDisplay::hir_fmt(self.as_ref(), f)
+ }
+}
+
+impl HirDisplay for ProjectionTy {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ let trait_ = f.db.trait_data(self.trait_(f.db));
+ write!(f, "<")?;
+ self.self_type_parameter(Interner).hir_fmt(f)?;
+ write!(f, " as {}", trait_.name)?;
+ if self.substitution.len(Interner) > 1 {
+ write!(f, "<")?;
+ f.write_joined(&self.substitution.as_slice(Interner)[1..], ", ")?;
+ write!(f, ">")?;
+ }
+ write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for OpaqueTy {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ self.substitution.at(Interner, 0).hir_fmt(f)
+ }
+}
+
+impl HirDisplay for GenericArg {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.interned() {
+ crate::GenericArgData::Ty(ty) => ty.hir_fmt(f),
+ crate::GenericArgData::Lifetime(lt) => lt.hir_fmt(f),
+ crate::GenericArgData::Const(c) => c.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for Const {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = self.interned();
+ match data.value {
+ ConstValue::BoundVar(idx) => idx.hir_fmt(f),
+ ConstValue::InferenceVar(..) => write!(f, "#c#"),
+ ConstValue::Placeholder(idx) => {
+ let id = from_placeholder_idx(f.db, idx);
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.type_or_consts[id.local_id];
+ write!(f, "{}", param_data.name().unwrap())
+ }
+ ConstValue::Concrete(c) => write!(f, "{}", c.interned),
+ }
+ }
+}
+
+impl HirDisplay for BoundVar {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "?{}.{}", self.debruijn.depth(), self.index)
+ }
+}
+
+impl HirDisplay for Ty {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ match self.kind(Interner) {
+ TyKind::Never => write!(f, "!")?,
+ TyKind::Str => write!(f, "str")?,
+ TyKind::Scalar(Scalar::Bool) => write!(f, "bool")?,
+ TyKind::Scalar(Scalar::Char) => write!(f, "char")?,
+ &TyKind::Scalar(Scalar::Float(t)) => write!(f, "{}", primitive::float_ty_to_string(t))?,
+ &TyKind::Scalar(Scalar::Int(t)) => write!(f, "{}", primitive::int_ty_to_string(t))?,
+ &TyKind::Scalar(Scalar::Uint(t)) => write!(f, "{}", primitive::uint_ty_to_string(t))?,
+ TyKind::Slice(t) => {
+ write!(f, "[")?;
+ t.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TyKind::Array(t, c) => {
+ write!(f, "[")?;
+ t.hir_fmt(f)?;
+ write!(f, "; ")?;
+ c.hir_fmt(f)?;
+ write!(f, "]")?;
+ }
+ TyKind::Raw(m, t) | TyKind::Ref(m, _, t) => {
+ if matches!(self.kind(Interner), TyKind::Raw(..)) {
+ write!(
+ f,
+ "*{}",
+ match m {
+ Mutability::Not => "const ",
+ Mutability::Mut => "mut ",
+ }
+ )?;
+ } else {
+ write!(
+ f,
+ "&{}",
+ match m {
+ Mutability::Not => "",
+ Mutability::Mut => "mut ",
+ }
+ )?;
+ }
+
+ // FIXME: all this just to decide whether to use parentheses...
+ let contains_impl_fn = |bounds: &[QuantifiedWhereClause]| {
+ bounds.iter().any(|bound| {
+ if let WhereClause::Implemented(trait_ref) = bound.skip_binders() {
+ let trait_ = trait_ref.hir_trait_id();
+ fn_traits(f.db.upcast(), trait_).any(|it| it == trait_)
+ } else {
+ false
+ }
+ })
+ };
+ let (preds_to_print, has_impl_fn_pred) = match t.kind(Interner) {
+ TyKind::Dyn(dyn_ty) if dyn_ty.bounds.skip_binders().interned().len() > 1 => {
+ let bounds = dyn_ty.bounds.skip_binders().interned();
+ (bounds.len(), contains_impl_fn(bounds))
+ }
+ TyKind::Alias(AliasTy::Opaque(OpaqueTy {
+ opaque_ty_id,
+ substitution: parameters,
+ }))
+ | TyKind::OpaqueType(opaque_ty_id, parameters) => {
+ let impl_trait_id =
+ f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+ if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id {
+ let datas =
+ f.db.return_type_impl_traits(func)
+ .expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, parameters);
+ let mut len = bounds.skip_binders().len();
+
+ // Don't count Sized but count when it absent
+ // (i.e. when explicit ?Sized bound is set).
+ let default_sized = SizedByDefault::Sized {
+ anchor: func.lookup(f.db.upcast()).module(f.db.upcast()).krate(),
+ };
+ let sized_bounds = bounds
+ .skip_binders()
+ .iter()
+ .filter(|b| {
+ matches!(
+ b.skip_binders(),
+ WhereClause::Implemented(trait_ref)
+ if default_sized.is_sized_trait(
+ trait_ref.hir_trait_id(),
+ f.db.upcast(),
+ ),
+ )
+ })
+ .count();
+ match sized_bounds {
+ 0 => len += 1,
+ _ => {
+ len = len.saturating_sub(sized_bounds);
+ }
+ }
+
+ (len, contains_impl_fn(bounds.skip_binders()))
+ } else {
+ (0, false)
+ }
+ }
+ _ => (0, false),
+ };
+
+ if has_impl_fn_pred && preds_to_print <= 2 {
+ return t.hir_fmt(f);
+ }
+
+ if preds_to_print > 1 {
+ write!(f, "(")?;
+ t.hir_fmt(f)?;
+ write!(f, ")")?;
+ } else {
+ t.hir_fmt(f)?;
+ }
+ }
+ TyKind::Tuple(_, substs) => {
+ if substs.len(Interner) == 1 {
+ write!(f, "(")?;
+ substs.at(Interner, 0).hir_fmt(f)?;
+ write!(f, ",)")?;
+ } else {
+ write!(f, "(")?;
+ f.write_joined(&*substs.as_slice(Interner), ", ")?;
+ write!(f, ")")?;
+ }
+ }
+ TyKind::Function(fn_ptr) => {
+ let sig = CallableSig::from_fn_ptr(fn_ptr);
+ sig.hir_fmt(f)?;
+ }
+ TyKind::FnDef(def, parameters) => {
+ let def = from_chalk(f.db, *def);
+ let sig = f.db.callable_item_signature(def).substitute(Interner, parameters);
+ match def {
+ CallableDefId::FunctionId(ff) => {
+ write!(f, "fn {}", f.db.function_data(ff).name)?
+ }
+ CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ CallableDefId::EnumVariantId(e) => {
+ write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
+ }
+ };
+ if parameters.len(Interner) > 0 {
+ let generics = generics(f.db.upcast(), def.into());
+ let (parent_params, self_param, type_params, const_params, _impl_trait_params) =
+ generics.provenance_split();
+ let total_len = parent_params + self_param + type_params + const_params;
+ // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
+ if total_len > 0 {
+ write!(f, "<")?;
+ f.write_joined(&parameters.as_slice(Interner)[..total_len], ", ")?;
+ write!(f, ">")?;
+ }
+ }
+ write!(f, "(")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, ")")?;
+ let ret = sig.ret();
+ if !ret.is_unit() {
+ write!(f, " -> ")?;
+ ret.hir_fmt(f)?;
+ }
+ }
+ TyKind::Adt(AdtId(def_id), parameters) => {
+ match f.display_target {
+ DisplayTarget::Diagnostics | DisplayTarget::Test => {
+ let name = match *def_id {
+ hir_def::AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
+ hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
+ hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
+ };
+ write!(f, "{}", name)?;
+ }
+ DisplayTarget::SourceCode { module_id } => {
+ if let Some(path) = find_path::find_path(
+ f.db.upcast(),
+ ItemInNs::Types((*def_id).into()),
+ module_id,
+ ) {
+ write!(f, "{}", path)?;
+ } else {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::PathNotFound,
+ ));
+ }
+ }
+ }
+
+ if parameters.len(Interner) > 0 {
+ let parameters_to_write = if f.display_target.is_source_code()
+ || f.omit_verbose_types()
+ {
+ match self
+ .as_generic_def(f.db)
+ .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
+ .filter(|defaults| !defaults.is_empty())
+ {
+ None => parameters.as_slice(Interner),
+ Some(default_parameters) => {
+ fn should_show(
+ parameter: &GenericArg,
+ default_parameters: &[Binders<GenericArg>],
+ i: usize,
+ parameters: &Substitution,
+ ) -> bool {
+ if parameter.ty(Interner).map(|x| x.kind(Interner))
+ == Some(&TyKind::Error)
+ {
+ return true;
+ }
+ if let Some(ConstValue::Concrete(c)) =
+ parameter.constant(Interner).map(|x| x.data(Interner).value)
+ {
+ if c.interned == ConstScalar::Unknown {
+ return true;
+ }
+ }
+ let default_parameter = match default_parameters.get(i) {
+ Some(x) => x,
+ None => return true,
+ };
+ let actual_default = default_parameter
+ .clone()
+ .substitute(Interner, &subst_prefix(parameters, i));
+ parameter != &actual_default
+ }
+ let mut default_from = 0;
+ for (i, parameter) in parameters.iter(Interner).enumerate() {
+ if should_show(parameter, &default_parameters, i, parameters) {
+ default_from = i + 1;
+ }
+ }
+ &parameters.as_slice(Interner)[0..default_from]
+ }
+ }
+ } else {
+ parameters.as_slice(Interner)
+ };
+ if !parameters_to_write.is_empty() {
+ write!(f, "<")?;
+
+ if f.display_target.is_source_code() {
+ let mut first = true;
+ for generic_arg in parameters_to_write {
+ if !first {
+ write!(f, ", ")?;
+ }
+ first = false;
+
+ if generic_arg.ty(Interner).map(|ty| ty.kind(Interner))
+ == Some(&TyKind::Error)
+ {
+ write!(f, "_")?;
+ } else {
+ generic_arg.hir_fmt(f)?;
+ }
+ }
+ } else {
+ f.write_joined(parameters_to_write, ", ")?;
+ }
+
+ write!(f, ">")?;
+ }
+ }
+ }
+ TyKind::AssociatedType(assoc_type_id, parameters) => {
+ let type_alias = from_assoc_type_id(*assoc_type_id);
+ let trait_ = match type_alias.lookup(f.db.upcast()).container {
+ ItemContainerId::TraitId(it) => it,
+ _ => panic!("not an associated type"),
+ };
+ let trait_ = f.db.trait_data(trait_);
+ let type_alias_data = f.db.type_alias_data(type_alias);
+
+ // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
+ if f.display_target.is_test() {
+ write!(f, "{}::{}", trait_.name, type_alias_data.name)?;
+ if parameters.len(Interner) > 0 {
+ write!(f, "<")?;
+ f.write_joined(&*parameters.as_slice(Interner), ", ")?;
+ write!(f, ">")?;
+ }
+ } else {
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(type_alias),
+ substitution: parameters.clone(),
+ };
+
+ projection_ty.hir_fmt(f)?;
+ }
+ }
+ TyKind::Foreign(type_alias) => {
+ let type_alias = f.db.type_alias_data(from_foreign_def_id(*type_alias));
+ write!(f, "{}", type_alias.name)?;
+ }
+ TyKind::OpaqueType(opaque_ty_id, parameters) => {
+ let impl_trait_id = f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+ match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, &parameters);
+ let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ bounds.skip_binders(),
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
+ }
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ write!(f, "impl Future<Output = ")?;
+ parameters.at(Interner, 0).hir_fmt(f)?;
+ write!(f, ">")?;
+ }
+ }
+ }
+ TyKind::Closure(.., substs) => {
+ if f.display_target.is_source_code() {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::Closure,
+ ));
+ }
+ let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(f.db);
+ if let Some(sig) = sig {
+ if sig.params().is_empty() {
+ write!(f, "||")?;
+ } else if f.should_truncate() {
+ write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
+ } else {
+ write!(f, "|")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, "|")?;
+ };
+
+ write!(f, " -> ")?;
+ sig.ret().hir_fmt(f)?;
+ } else {
+ write!(f, "{{closure}}")?;
+ }
+ }
+ TyKind::Placeholder(idx) => {
+ let id = from_placeholder_idx(f.db, *idx);
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.type_or_consts[id.local_id];
+ match param_data {
+ TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+ TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
+ write!(f, "{}", p.name.clone().unwrap_or_else(Name::missing))?
+ }
+ TypeParamProvenance::ArgumentImplTrait => {
+ let substs = generics.placeholder_subst(f.db);
+ let bounds =
+ f.db.generic_predicates(id.parent)
+ .iter()
+ .map(|pred| pred.clone().substitute(Interner, &substs))
+ .filter(|wc| match &wc.skip_binders() {
+ WhereClause::Implemented(tr) => {
+ &tr.self_type_parameter(Interner) == self
+ }
+ WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(proj),
+ ty: _,
+ }) => &proj.self_type_parameter(Interner) == self,
+ _ => false,
+ })
+ .collect::<Vec<_>>();
+ let krate = id.parent.module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ &bounds,
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ }
+ },
+ TypeOrConstParamData::ConstParamData(p) => {
+ write!(f, "{}", p.name)?;
+ }
+ }
+ }
+ TyKind::BoundVar(idx) => idx.hir_fmt(f)?,
+ TyKind::Dyn(dyn_ty) => {
+ write_bounds_like_dyn_trait_with_prefix(
+ "dyn",
+ dyn_ty.bounds.skip_binders().interned(),
+ SizedByDefault::NotSized,
+ f,
+ )?;
+ }
+ TyKind::Alias(AliasTy::Projection(p_ty)) => p_ty.hir_fmt(f)?,
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ let impl_trait_id = f.db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into());
+ match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ let bounds = data.substitute(Interner, &opaque_ty.substitution);
+ let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+ write_bounds_like_dyn_trait_with_prefix(
+ "impl",
+ bounds.skip_binders(),
+ SizedByDefault::Sized { anchor: krate },
+ f,
+ )?;
+ }
+ ImplTraitId::AsyncBlockTypeImplTrait(..) => {
+ write!(f, "{{async block}}")?;
+ }
+ };
+ }
+ TyKind::Error => {
+ if f.display_target.is_source_code() {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::UnknownType,
+ ));
+ }
+ write!(f, "{{unknown}}")?;
+ }
+ TyKind::InferenceVar(..) => write!(f, "_")?,
+ TyKind::Generator(..) => write!(f, "{{generator}}")?,
+ TyKind::GeneratorWitness(..) => write!(f, "{{generator witness}}")?,
+ }
+ Ok(())
+ }
+}
+
+/// Renders a callable signature as `fn(params, ...) -> Ret`, omitting the
+/// return type when it is the unit type.
+impl HirDisplay for CallableSig {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        write!(f, "fn(")?;
+        f.write_joined(self.params(), ", ")?;
+        if self.is_varargs {
+            // C-variadic: `fn(...)` with no named params, `fn(a, ...)` otherwise.
+            write!(f, "{}", if self.params().is_empty() { "..." } else { ", ..." })?;
+        }
+        write!(f, ")")?;
+        match self.ret() {
+            // `-> ()` is implied and not printed.
+            ret if ret.is_unit() => {}
+            ret => {
+                write!(f, " -> ")?;
+                ret.hir_fmt(f)?;
+            }
+        }
+        Ok(())
+    }
+}
+
+/// All `Fn`-family traits (`Fn`, `FnMut`, `FnOnce`) as seen from the crate
+/// that defines `trait_`.
+fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> {
+    utils::fn_traits(db, trait_.lookup(db).container.krate())
+}
+
+/// Controls whether an implicit `Sized` bound is assumed when rendering a
+/// bound list (`impl Trait` positions are `Sized` by default; `dyn Trait` is not).
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum SizedByDefault {
+    /// No implicit `Sized` bound (used for `dyn Trait`).
+    NotSized,
+    /// Implicitly `Sized`; `anchor` is the crate used to resolve the `Sized` lang item.
+    Sized { anchor: CrateId },
+}
+
+impl SizedByDefault {
+    /// Is `trait_` the `Sized` lang item (resolved from `anchor`)?
+    /// Always `false` for `NotSized`.
+    fn is_sized_trait(self, trait_: TraitId, db: &dyn DefDatabase) -> bool {
+        if let Self::Sized { anchor } = self {
+            db.lang_item(anchor, SmolStr::new_inline("sized"))
+                .and_then(|lang_item| lang_item.as_trait())
+                == Some(trait_)
+        } else {
+            false
+        }
+    }
+}
+
+/// Writes `prefix` (e.g. `"impl"` or `"dyn"`) followed by a space and the
+/// rendered bound list, or just the prefix when there is nothing to render.
+///
+/// The list is rendered if there are any predicates, or if an implicit
+/// `Sized`/`?Sized` marker must be shown anyway.
+pub fn write_bounds_like_dyn_trait_with_prefix(
+    prefix: &str,
+    predicates: &[QuantifiedWhereClause],
+    default_sized: SizedByDefault,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+    write!(f, "{}", prefix)?;
+    // Simplified from `A || !A && B` to the equivalent `A || B`.
+    if !predicates.is_empty() || matches!(default_sized, SizedByDefault::Sized { .. }) {
+        write!(f, " ")?;
+        write_bounds_like_dyn_trait(predicates, default_sized, f)
+    } else {
+        Ok(())
+    }
+}
+
+/// Writes a `+`-separated bound list in the surface style used after
+/// `impl`/`dyn`: `Fn*` traits get parenthesized sugar, projection predicates
+/// become assoc-type bindings, and a `Sized`/`?Sized` marker is appended
+/// according to `default_sized`.
+fn write_bounds_like_dyn_trait(
+    predicates: &[QuantifiedWhereClause],
+    default_sized: SizedByDefault,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+    // Note: This code is written to produce nice results (i.e.
+    // corresponding to surface Rust) for types that can occur in
+    // actual Rust. It will have weird results if the predicates
+    // aren't as expected (i.e. self types = $0, projection
+    // predicates for a certain trait come after the Implemented
+    // predicate for that trait).
+    //
+    // Formatting state carried across predicates:
+    // - `angle_open`: a `<` was emitted and awaits its closing `>`;
+    // - `is_fn_trait`: the previous trait was an `Fn*` trait, so the next
+    //   projection is its output type;
+    // - `is_sized`: an explicit `Sized` bound was seen.
+    let mut first = true;
+    let mut angle_open = false;
+    let mut is_fn_trait = false;
+    let mut is_sized = false;
+    for p in predicates.iter() {
+        match p.skip_binders() {
+            WhereClause::Implemented(trait_ref) => {
+                let trait_ = trait_ref.hir_trait_id();
+                if default_sized.is_sized_trait(trait_, f.db.upcast()) {
+                    is_sized = true;
+                    if matches!(default_sized, SizedByDefault::Sized { .. }) {
+                        // Don't print +Sized, but rather +?Sized if absent.
+                        continue;
+                    }
+                }
+                if !is_fn_trait {
+                    is_fn_trait = fn_traits(f.db.upcast(), trait_).any(|it| it == trait_);
+                }
+                if !is_fn_trait && angle_open {
+                    write!(f, ">")?;
+                    angle_open = false;
+                }
+                if !first {
+                    write!(f, " + ")?;
+                }
+                // We assume that the self type is ^0.0 (i.e. the
+                // existential) here, which is the only thing that's
+                // possible in actual Rust, and hence don't print it
+                write!(f, "{}", f.db.trait_data(trait_).name)?;
+                if let [_, params @ ..] = &*trait_ref.substitution.as_slice(Interner) {
+                    if is_fn_trait {
+                        // `Fn*` sugar: the first parameter is the argument tuple.
+                        if let Some(args) =
+                            params.first().and_then(|it| it.assert_ty_ref(Interner).as_tuple())
+                        {
+                            write!(f, "(")?;
+                            f.write_joined(args.as_slice(Interner), ", ")?;
+                            write!(f, ")")?;
+                        }
+                    } else if !params.is_empty() {
+                        write!(f, "<")?;
+                        f.write_joined(params, ", ")?;
+                        // there might be assoc type bindings, so we leave the angle brackets open
+                        angle_open = true;
+                    }
+                }
+            }
+            // Projection right after an `Fn*` trait: its output type, printed `-> Ret`.
+            WhereClause::AliasEq(alias_eq) if is_fn_trait => {
+                is_fn_trait = false;
+                if !alias_eq.ty.is_unit() {
+                    write!(f, " -> ")?;
+                    alias_eq.ty.hir_fmt(f)?;
+                }
+            }
+            WhereClause::AliasEq(AliasEq { ty, alias }) => {
+                // in types in actual Rust, these will always come
+                // after the corresponding Implemented predicate
+                if angle_open {
+                    write!(f, ", ")?;
+                } else {
+                    write!(f, "<")?;
+                    angle_open = true;
+                }
+                if let AliasTy::Projection(proj) = alias {
+                    let type_alias =
+                        f.db.type_alias_data(from_assoc_type_id(proj.associated_ty_id));
+                    write!(f, "{} = ", type_alias.name)?;
+                }
+                ty.hir_fmt(f)?;
+            }
+
+            // FIXME implement these
+            WhereClause::LifetimeOutlives(_) => {}
+            WhereClause::TypeOutlives(_) => {}
+        }
+        first = false;
+    }
+    if angle_open {
+        write!(f, ">")?;
+    }
+    // Append the implicit `Sized`/`?Sized` marker where required.
+    if matches!(default_sized, SizedByDefault::Sized { .. }) {
+        if !is_sized {
+            write!(f, "{}?Sized", if first { "" } else { " + " })?;
+        } else if first {
+            write!(f, "Sized")?;
+        }
+    }
+    Ok(())
+}
+
+/// Formats a trait ref either as `Self: Trait<Args>` or, with `use_as`, as
+/// `Self as Trait<Args>` (for qualified-path rendering).
+fn fmt_trait_ref(
+    tr: &TraitRef,
+    f: &mut HirFormatter<'_>,
+    use_as: bool,
+) -> Result<(), HirDisplayError> {
+    if f.should_truncate() {
+        return write!(f, "{}", TYPE_HINT_TRUNCATION);
+    }
+
+    tr.self_type_parameter(Interner).hir_fmt(f)?;
+    write!(f, "{}", if use_as { " as " } else { ": " })?;
+    write!(f, "{}", f.db.trait_data(tr.hir_trait_id()).name)?;
+    // The first substitution entry is the self type; the rest are generic args.
+    if let [_self_ty, rest @ ..] = tr.substitution.as_slice(Interner) {
+        if !rest.is_empty() {
+            write!(f, "<")?;
+            f.write_joined(rest, ", ")?;
+            write!(f, ">")?;
+        }
+    }
+    Ok(())
+}
+
+impl HirDisplay for TraitRef {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        // Renders as `Self: Trait<Args>` (the `use_as = false` form).
+        fmt_trait_ref(self, f, false)
+    }
+}
+
+// Renders a single where-clause: trait bounds via `TraitRef`, projection
+// equalities in fully-qualified form `<T as Trait>::Assoc = U`.
+impl HirDisplay for WhereClause {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        if f.should_truncate() {
+            return write!(f, "{}", TYPE_HINT_TRUNCATION);
+        }
+
+        match self {
+            WhereClause::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
+            WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
+                write!(f, "<")?;
+                fmt_trait_ref(&projection_ty.trait_ref(f.db), f, true)?;
+                write!(
+                    f,
+                    ">::{} = ",
+                    f.db.type_alias_data(from_assoc_type_id(projection_ty.associated_ty_id)).name,
+                )?;
+                ty.hir_fmt(f)?;
+            }
+            // Non-projection alias equalities have no surface syntax.
+            WhereClause::AliasEq(_) => write!(f, "{{error}}")?,
+
+            // FIXME implement these
+            WhereClause::TypeOutlives(..) => {}
+            WhereClause::LifetimeOutlives(..) => {}
+        }
+        Ok(())
+    }
+}
+
+// Renders an outlives relation as `'a: 'b`.
+impl HirDisplay for LifetimeOutlives {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        self.a.hir_fmt(f)?;
+        write!(f, ": ")?;
+        self.b.hir_fmt(f)
+    }
+}
+
+// Delegates to the interned `LifetimeData`.
+impl HirDisplay for Lifetime {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        self.interned().hir_fmt(f)
+    }
+}
+
+impl HirDisplay for LifetimeData {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
+            LifetimeData::InferenceVar(_) => write!(f, "_"),
+            LifetimeData::Placeholder(idx) => {
+                // Look the placeholder back up in its owner's generic params to
+                // recover the source-level lifetime name.
+                let id = lt_from_placeholder_idx(f.db, *idx);
+                let generics = generics(f.db.upcast(), id.parent);
+                let param_data = &generics.params.lifetimes[id.local_id];
+                write!(f, "{}", param_data.name)
+            }
+            LifetimeData::Static => write!(f, "'static"),
+            // These lifetime kinds have no surface-syntax representation.
+            LifetimeData::Empty(_) => Ok(()),
+            LifetimeData::Erased => Ok(()),
+            LifetimeData::Phantom(_, _) => Ok(()),
+        }
+    }
+}
+
+// Debug-style rendering of a chalk domain goal; only `Holds` is spelled out.
+impl HirDisplay for DomainGoal {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        if let DomainGoal::Holds(wc) = self {
+            write!(f, "Holds(")?;
+            wc.hir_fmt(f)?;
+            write!(f, ")")
+        } else {
+            // All other goal kinds render as an opaque `?`.
+            write!(f, "?")
+        }
+    }
+}
+
+/// Writes the visibility qualifier (`pub `, `pub(crate) `, `pub(super) `,
+/// `pub(in ...) `) of `vis` as seen from `module_id`; writes nothing for
+/// private/`pub(self)` visibility.
+pub fn write_visibility(
+    module_id: ModuleId,
+    vis: Visibility,
+    f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+    let vis_id = match vis {
+        Visibility::Public => return write!(f, "pub "),
+        Visibility::Module(vis_id) => vis_id,
+    };
+    let def_map = module_id.def_map(f.db.upcast());
+    if vis_id == module_id {
+        // pub(self) or omitted
+        Ok(())
+    } else if def_map.module_id(def_map.root()) == vis_id {
+        write!(f, "pub(crate) ")
+    } else if module_id.containing_module(f.db.upcast()) == Some(vis_id) {
+        write!(f, "pub(super) ")
+    } else {
+        write!(f, "pub(in ...) ")
+    }
+}
+
+// Renders a source-level type reference (`TypeRef`) back to Rust-like syntax.
+impl HirDisplay for TypeRef {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            TypeRef::Never => write!(f, "!")?,
+            TypeRef::Placeholder => write!(f, "_")?,
+            TypeRef::Tuple(elems) => {
+                write!(f, "(")?;
+                f.write_joined(elems, ", ")?;
+                if elems.len() == 1 {
+                    // A one-element tuple needs the trailing comma: `(T,)`.
+                    write!(f, ",")?;
+                }
+                write!(f, ")")?;
+            }
+            TypeRef::Path(path) => path.hir_fmt(f)?,
+            TypeRef::RawPtr(inner, mutability) => {
+                let mutability = match mutability {
+                    hir_def::type_ref::Mutability::Shared => "*const ",
+                    hir_def::type_ref::Mutability::Mut => "*mut ",
+                };
+                write!(f, "{}", mutability)?;
+                inner.hir_fmt(f)?;
+            }
+            TypeRef::Reference(inner, lifetime, mutability) => {
+                let mutability = match mutability {
+                    hir_def::type_ref::Mutability::Shared => "",
+                    hir_def::type_ref::Mutability::Mut => "mut ",
+                };
+                write!(f, "&")?;
+                if let Some(lifetime) = lifetime {
+                    write!(f, "{} ", lifetime.name)?;
+                }
+                write!(f, "{}", mutability)?;
+                inner.hir_fmt(f)?;
+            }
+            TypeRef::Array(inner, len) => {
+                write!(f, "[")?;
+                inner.hir_fmt(f)?;
+                write!(f, "; {}]", len)?;
+            }
+            TypeRef::Slice(inner) => {
+                write!(f, "[")?;
+                inner.hir_fmt(f)?;
+                write!(f, "]")?;
+            }
+            TypeRef::Fn(parameters, is_varargs) => {
+                // FIXME: Function pointer qualifiers.
+                write!(f, "fn(")?;
+                // The last entry of `parameters` is the return type.
+                if let Some(((_, return_type), function_parameters)) = parameters.split_last() {
+                    for index in 0..function_parameters.len() {
+                        let (param_name, param_type) = &function_parameters[index];
+                        if let Some(name) = param_name {
+                            write!(f, "{}: ", name)?;
+                        }
+
+                        param_type.hir_fmt(f)?;
+
+                        if index != function_parameters.len() - 1 {
+                            write!(f, ", ")?;
+                        }
+                    }
+                    if *is_varargs {
+                        write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?;
+                    }
+                    write!(f, ")")?;
+                    match &return_type {
+                        // `-> ()` is implied and omitted.
+                        TypeRef::Tuple(tup) if tup.is_empty() => {}
+                        _ => {
+                            write!(f, " -> ")?;
+                            return_type.hir_fmt(f)?;
+                        }
+                    }
+                }
+            }
+            TypeRef::ImplTrait(bounds) => {
+                write!(f, "impl ")?;
+                f.write_joined(bounds, " + ")?;
+            }
+            TypeRef::DynTrait(bounds) => {
+                write!(f, "dyn ")?;
+                f.write_joined(bounds, " + ")?;
+            }
+            TypeRef::Macro(macro_call) => {
+                // Print the macro path followed by `!(..)`; arguments are elided.
+                let macro_call = macro_call.to_node(f.db.upcast());
+                let ctx = body::LowerCtx::with_hygiene(f.db.upcast(), &Hygiene::new_unhygienic());
+                match macro_call.path() {
+                    Some(path) => match Path::from_src(path, &ctx) {
+                        Some(path) => path.hir_fmt(f)?,
+                        None => write!(f, "{{macro}}")?,
+                    },
+                    None => write!(f, "{{macro}}")?,
+                }
+                write!(f, "!(..)")?;
+            }
+            TypeRef::Error => write!(f, "{{error}}")?,
+        }
+        Ok(())
+    }
+}
+
+// Renders one bound in a bound list: a (possibly `?`-prefixed) trait path,
+// a lifetime, or a higher-ranked `for<...> Path` bound.
+impl HirDisplay for TypeBound {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            TypeBound::Path(path, modifier) => {
+                if let TraitBoundModifier::Maybe = modifier {
+                    write!(f, "?")?;
+                }
+                path.hir_fmt(f)
+            }
+            TypeBound::ForLifetime(lifetimes, path) => {
+                write!(f, "for<{}> ", lifetimes.iter().format(", "))?;
+                path.hir_fmt(f)
+            }
+            TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
+            TypeBound::Error => write!(f, "{{error}}"),
+        }
+    }
+}
+
+// Renders a path, including type anchors (`<T>::...`), `crate`/`self`/`super`
+// prefixes, `$crate` resolution, and generic argument lists.
+impl HirDisplay for Path {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match (self.type_anchor(), self.kind()) {
+            (Some(anchor), _) => {
+                write!(f, "<")?;
+                anchor.hir_fmt(f)?;
+                write!(f, ">")?;
+            }
+            (_, PathKind::Plain) => {}
+            (_, PathKind::Abs) => {}
+            (_, PathKind::Crate) => write!(f, "crate")?,
+            (_, PathKind::Super(0)) => write!(f, "self")?,
+            (_, PathKind::Super(n)) => {
+                // `super::super::...`, `n` times.
+                for i in 0..*n {
+                    if i > 0 {
+                        write!(f, "::")?;
+                    }
+                    write!(f, "super")?;
+                }
+            }
+            (_, PathKind::DollarCrate(id)) => {
+                // Resolve `$crate` to the crate's display name.
+                // FIXME: should use the dependency name instead if available, but that depends on
+                // the crate invoking `HirDisplay`
+                let crate_graph = f.db.crate_graph();
+                let name = crate_graph[*id]
+                    .display_name
+                    .as_ref()
+                    .map(|name| name.canonical_name())
+                    .unwrap_or("$crate");
+                write!(f, "{name}")?
+            }
+        }
+
+        for (seg_idx, segment) in self.segments().iter().enumerate() {
+            if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
+                write!(f, "::")?;
+            }
+            write!(f, "{}", segment.name)?;
+            if let Some(generic_args) = segment.args_and_bindings {
+                // We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
+                // Do we actually format expressions?
+                if generic_args.desugared_from_fn {
+                    // First argument will be a tuple, which already includes the parentheses.
+                    // If the tuple only contains 1 item, write it manually to avoid the trailing `,`.
+                    if let hir_def::path::GenericArg::Type(TypeRef::Tuple(v)) =
+                        &generic_args.args[0]
+                    {
+                        if v.len() == 1 {
+                            write!(f, "(")?;
+                            v[0].hir_fmt(f)?;
+                            write!(f, ")")?;
+                        } else {
+                            generic_args.args[0].hir_fmt(f)?;
+                        }
+                    }
+                    // A non-unit binding is the `Fn`-sugar return type.
+                    if let Some(ret) = &generic_args.bindings[0].type_ref {
+                        if !matches!(ret, TypeRef::Tuple(v) if v.is_empty()) {
+                            write!(f, " -> ")?;
+                            ret.hir_fmt(f)?;
+                        }
+                    }
+                    return Ok(());
+                }
+
+                write!(f, "<")?;
+                let mut first = true;
+                for arg in &generic_args.args {
+                    if first {
+                        first = false;
+                        if generic_args.has_self_type {
+                            // FIXME: Convert to `<Ty as Trait>` form.
+                            write!(f, "Self = ")?;
+                        }
+                    } else {
+                        write!(f, ", ")?;
+                    }
+                    arg.hir_fmt(f)?;
+                }
+                // Associated type bindings (`Item = T` / `Item: Bound`) follow the args.
+                for binding in &generic_args.bindings {
+                    if first {
+                        first = false;
+                    } else {
+                        write!(f, ", ")?;
+                    }
+                    write!(f, "{}", binding.name)?;
+                    match &binding.type_ref {
+                        Some(ty) => {
+                            write!(f, " = ")?;
+                            ty.hir_fmt(f)?
+                        }
+                        None => {
+                            write!(f, ": ")?;
+                            f.write_joined(&binding.bounds, " + ")?;
+                        }
+                    }
+                }
+                write!(f, ">")?;
+            }
+        }
+        Ok(())
+    }
+}
+
+// Renders a generic argument in a path: a type, a const, or a lifetime.
+impl HirDisplay for hir_def::path::GenericArg {
+    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+        match self {
+            hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
+            hir_def::path::GenericArg::Const(c) => write!(f, "{}", c),
+            hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
new file mode 100644
index 000000000..46eeea0e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -0,0 +1,1088 @@
+//! Type inference, i.e. the process of walking through the code and determining
+//! the type of each expression and pattern.
+//!
+//! For type inference, compare the implementations in rustc (the various
+//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
+//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
+//! inference here is the `infer` function, which infers the types of all
+//! expressions in a given function.
+//!
+//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
+//! which represent currently unknown types; as we walk through the expressions,
+//! we might determine that certain variables need to be equal to each other, or
+//! to certain types. To record this, we use the union-find implementation from
+//! the `ena` crate, which is extracted from rustc.
+
+use std::ops::Index;
+use std::sync::Arc;
+
+use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
+use hir_def::{
+ body::Body,
+ data::{ConstData, StaticData},
+ expr::{BindingAnnotation, ExprId, PatId},
+ lang_item::LangItemTarget,
+ path::{path, Path},
+ resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
+ type_ref::TypeRef,
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, Lookup,
+ TraitId, TypeAliasId, VariantId,
+};
+use hir_expand::name::{name, Name};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashMap;
+use stdx::{always, impl_from};
+
+use crate::{
+ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany,
+ lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal,
+ GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+// This lint has a false positive here. See the link below for details.
+//
+// https://github.com/rust-lang/rust/issues/57411
+#[allow(unreachable_pub)]
+pub use coerce::could_coerce;
+#[allow(unreachable_pub)]
+pub use unify::could_unify;
+
+pub(crate) mod unify;
+mod path;
+mod expr;
+mod pat;
+mod coerce;
+mod closure;
+
+/// The entry point of type inference.
+pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+    let _p = profile::span("infer_query");
+    let resolver = def.resolver(db.upcast());
+    let body = db.body(def);
+    let mut ctx = InferenceContext::new(db, def, &body, resolver);
+
+    // Seed the context with the item's declared signature/type first …
+    match def {
+        DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
+        DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
+        DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
+    }
+
+    // … then walk the body and finally resolve all inference variables.
+    ctx.infer_body();
+
+    Arc::new(ctx.resolve_all())
+}
+
+/// Fully normalize all the types found within `ty` in context of `owner` body definition.
+///
+/// This is appropriate to use only after type-check: it assumes
+/// that normalization will succeed, for example.
+pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> Ty {
+    // Fast path: no associated-type projections means nothing to normalize.
+    if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) {
+        return ty;
+    }
+    let krate = owner.module(db.upcast()).krate();
+    // Owners without a generic def id get an empty trait environment.
+    let trait_env = owner
+        .as_generic_def_id()
+        .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
+    let mut table = unify::InferenceTable::new(db, trait_env);
+
+    let ty_with_vars = table.normalize_associated_types_in(ty);
+    table.resolve_obligations_as_possible();
+    table.propagate_diverging_flag();
+    table.resolve_completely(ty_with_vars)
+}
+
+/// Identifies either an expression or a pattern within a body; used as a key
+/// for maps that cover both (e.g. variant resolutions, type mismatches).
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+enum ExprOrPatId {
+    ExprId(ExprId),
+    PatId(PatId),
+}
+impl_from!(ExprId, PatId for ExprOrPatId);
+
+/// Binding modes inferred for patterns.
+/// <https://doc.rust-lang.org/reference/patterns.html#binding-modes>
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum BindingMode {
+    /// Bind by value.
+    Move,
+    /// Bind by (possibly mutable) reference, as for `ref x` / `ref mut x`.
+    Ref(Mutability),
+}
+
+impl BindingMode {
+    /// Maps an explicit source-level annotation to a binding mode. Note that
+    /// `mut x` still binds by value (`Move`).
+    fn convert(annotation: BindingAnnotation) -> BindingMode {
+        match annotation {
+            BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
+            BindingAnnotation::Ref => BindingMode::Ref(Mutability::Not),
+            BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
+        }
+    }
+}
+
+impl Default for BindingMode {
+    fn default() -> Self {
+        // Unannotated bindings bind by value.
+        BindingMode::Move
+    }
+}
+
+/// Used to generalize patterns and assignee expressions.
+trait PatLike: Into<ExprOrPatId> + Copy {
+    /// Extra state threaded through inference: the default binding mode for
+    /// real patterns, nothing (`()`) for assignee expressions.
+    type BindingMode: Copy;
+
+    fn infer(
+        this: &mut InferenceContext<'_>,
+        id: Self,
+        expected_ty: &Ty,
+        default_bm: Self::BindingMode,
+    ) -> Ty;
+}
+
+// Assignee expressions (destructuring left-hand sides) carry no binding mode.
+impl PatLike for ExprId {
+    type BindingMode = ();
+
+    fn infer(
+        this: &mut InferenceContext<'_>,
+        id: Self,
+        expected_ty: &Ty,
+        _: Self::BindingMode,
+    ) -> Ty {
+        this.infer_assignee_expr(id, expected_ty)
+    }
+}
+
+// Real patterns thread the default binding mode through.
+impl PatLike for PatId {
+    type BindingMode = BindingMode;
+
+    fn infer(
+        this: &mut InferenceContext<'_>,
+        id: Self,
+        expected_ty: &Ty,
+        default_bm: Self::BindingMode,
+    ) -> Ty {
+        this.infer_pat(id, expected_ty, default_bm)
+    }
+}
+
+/// A successful inference step: the produced `value` plus obligations (goals)
+/// that still need to be proven in their environment.
+#[derive(Debug)]
+pub(crate) struct InferOk<T> {
+    value: T,
+    goals: Vec<InEnvironment<Goal>>,
+}
+
+impl<T> InferOk<T> {
+    /// Transforms the carried value while keeping the pending goals.
+    fn map<U>(self, f: impl FnOnce(T) -> U) -> InferOk<U> {
+        InferOk { value: f(self.value), goals: self.goals }
+    }
+}
+
+/// Marker error for a failed inference step; carries no details.
+#[derive(Debug)]
+pub(crate) struct TypeError;
+pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
+
+/// Diagnostics collected while inferring a body, keyed by the offending expression.
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum InferenceDiagnostic {
+    NoSuchField { expr: ExprId },
+    BreakOutsideOfLoop { expr: ExprId },
+    MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
+}
+
+/// A mismatch between an expected and an inferred type.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeMismatch {
+    /// The type required by the surrounding context.
+    pub expected: Ty,
+    /// The type that was actually inferred.
+    pub actual: Ty,
+}
+
+/// Frequently-needed types, interned once up front so lookups (e.g. the
+/// `Index` impls on `InferenceResult`) can hand out references cheaply.
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct InternedStandardTypes {
+    unknown: Ty,
+    bool_: Ty,
+    unit: Ty,
+}
+
+impl Default for InternedStandardTypes {
+    fn default() -> Self {
+        InternedStandardTypes {
+            unknown: TyKind::Error.intern(Interner),
+            bool_: TyKind::Scalar(Scalar::Bool).intern(Interner),
+            unit: TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
+        }
+    }
+}
+/// Represents coercing a value to a different type of value.
+///
+/// We transform values by following a number of `Adjust` steps in order.
+/// See the documentation on variants of `Adjust` for more details.
+///
+/// Here are some common scenarios:
+///
+/// 1. The simplest cases are where a pointer is not adjusted fat vs thin.
+/// Here the pointer will be dereferenced N times (where a dereference can
+/// happen to raw or borrowed pointers or any smart pointer which implements
+/// Deref, including Box<_>). The types of dereferences is given by
+/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
+/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
+/// `false`.
+///
+/// 2. A thin-to-fat coercion involves unsizing the underlying data. We start
+/// with a thin pointer, deref a number of times, unsize the underlying data,
+/// then autoref. The 'unsize' phase may change a fixed length array to a
+/// dynamically sized one, a concrete object to a trait object, or statically
+/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
+/// represented by:
+///
+/// ```
+/// Deref(None) -> [i32; 4],
+/// Borrow(AutoBorrow::Ref) -> &[i32; 4],
+/// Unsize -> &[i32],
+/// ```
+///
+/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
+/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+/// The autoderef and -ref are the same as in the above example, but the type
+/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
+/// the underlying conversions from `[i32; 4]` to `[i32]`.
+///
+/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In
+/// that case, we have the pointer we need coming in, so there are no
+/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
+/// At some point, of course, `Box` should move out of the compiler, in which
+/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
+/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct Adjustment {
+    /// The kind of adjustment step (deref, borrow, pointer cast, …).
+    pub kind: Adjust,
+    /// The type produced by applying this adjustment step.
+    pub target: Ty,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Adjust {
+    /// Go from ! to any type.
+    NeverToAny,
+    /// Dereference once, producing a place.
+    Deref(Option<OverloadedDeref>),
+    /// Take the address and produce either a `&` or `*` pointer.
+    Borrow(AutoBorrow),
+    /// A pointer-to-pointer conversion; see [`PointerCast`] for the kinds.
+    Pointer(PointerCast),
+}
+
+/// An overloaded autoderef step, representing a `Deref(Mut)::deref(_mut)`
+/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
+/// The target type is `U` in both cases, with the region and mutability
+/// being those shared by both the receiver and the returned reference.
+///
+/// The wrapped `Mutability` distinguishes the `Deref` vs `DerefMut` form.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct OverloadedDeref(pub Mutability);
+
+/// An automatic borrow inserted as part of an adjustment (`Adjust::Borrow`).
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum AutoBorrow {
+    /// Converts from T to &T.
+    Ref(Mutability),
+    /// Converts from T to *T.
+    RawPtr(Mutability),
+}
+
+/// The kinds of implicit pointer-to-pointer conversion performed during
+/// coercion (`Adjust::Pointer`).
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum PointerCast {
+    /// Go from a fn-item type to a fn-pointer type.
+    ReifyFnPointer,
+
+    /// Go from a safe fn pointer to an unsafe fn pointer.
+    UnsafeFnPointer,
+
+    /// Go from a non-capturing closure to an fn pointer or an unsafe fn pointer.
+    /// It cannot convert a closure that requires unsafe.
+    ClosureFnPointer(Safety),
+
+    /// Go from a mut raw pointer to a const raw pointer.
+    MutToConstPointer,
+
+    #[allow(dead_code)]
+    /// Go from `*const [T; N]` to `*const T`
+    ArrayToPointer,
+
+    /// Unsize a pointer/reference value, e.g., `&[T; n]` to
+    /// `&[T]`. Note that the source could be a thin or fat pointer.
+    /// This will do things like convert thin pointers to fat
+    /// pointers, or convert structs containing thin pointers to
+    /// structs containing fat pointers, or convert between fat
+    /// pointers. We don't store the details of how the transform is
+    /// done (in fact, we don't know that, because it might depend on
+    /// the precise type parameters). We just store the target
+    /// type. Codegen backends and miri figure out what has to be done
+    /// based on the precise source/target type at hand.
+    Unsize,
+}
+
+/// The result of type inference: A mapping from expressions and patterns to types.
+#[derive(Clone, PartialEq, Eq, Debug, Default)]
+pub struct InferenceResult {
+    /// For each method call expr, records the function it resolves to.
+    method_resolutions: FxHashMap<ExprId, (FunctionId, Substitution)>,
+    /// For each field access expr, records the field it resolves to.
+    field_resolutions: FxHashMap<ExprId, FieldId>,
+    /// For each struct literal or pattern, records the variant it resolves to.
+    variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
+    /// For each associated item record what it resolves to
+    assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
+    /// Diagnostics collected while inferring this body.
+    pub diagnostics: Vec<InferenceDiagnostic>,
+    /// For each expression, the type it was inferred to have.
+    pub type_of_expr: ArenaMap<ExprId, Ty>,
+    /// For each pattern record the type it resolves to.
+    ///
+    /// **Note**: When a pattern type is resolved it may still contain
+    /// unresolved or missing subpatterns or subpatterns of mismatched types.
+    pub type_of_pat: ArenaMap<PatId, Ty>,
+    /// Expected-vs-actual mismatches, keyed by the offending expr or pattern.
+    type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
+    /// Interned Unknown to return references to.
+    standard_types: InternedStandardTypes,
+    /// Stores the types which were implicitly dereferenced in pattern binding modes.
+    pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
+    pub pat_binding_modes: FxHashMap<PatId, BindingMode>,
+    pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
+}
+
+// Lookup helpers; these return owned/copied values so callers don't hold
+// borrows into the internal maps.
+impl InferenceResult {
+    pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, Substitution)> {
+        self.method_resolutions.get(&expr).cloned()
+    }
+    pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
+        self.field_resolutions.get(&expr).copied()
+    }
+    pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
+        self.variant_resolutions.get(&id.into()).copied()
+    }
+    pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
+        self.variant_resolutions.get(&id.into()).copied()
+    }
+    pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
+        self.assoc_resolutions.get(&id.into()).copied()
+    }
+    pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
+        self.assoc_resolutions.get(&id.into()).copied()
+    }
+    pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
+        self.type_mismatches.get(&expr.into())
+    }
+    pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
+        self.type_mismatches.get(&pat.into())
+    }
+    /// All recorded mismatches whose key is an expression.
+    pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
+        self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
+            ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
+            _ => None,
+        })
+    }
+    /// All recorded mismatches whose key is a pattern.
+    pub fn pat_type_mismatches(&self) -> impl Iterator<Item = (PatId, &TypeMismatch)> {
+        self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
+            ExprOrPatId::PatId(pat) => Some((pat, mismatch)),
+            _ => None,
+        })
+    }
+}
+
+// Indexing falls back to the interned `{unknown}` type for ids that were
+// never assigned a type, so callers need not handle `Option`.
+impl Index<ExprId> for InferenceResult {
+    type Output = Ty;
+
+    fn index(&self, expr: ExprId) -> &Ty {
+        self.type_of_expr.get(expr).unwrap_or(&self.standard_types.unknown)
+    }
+}
+
+impl Index<PatId> for InferenceResult {
+    type Output = Ty;
+
+    fn index(&self, pat: PatId) -> &Ty {
+        self.type_of_pat.get(pat).unwrap_or(&self.standard_types.unknown)
+    }
+}
+
+/// The inference context contains all information needed during type inference.
+#[derive(Clone, Debug)]
+pub(crate) struct InferenceContext<'a> {
+    pub(crate) db: &'a dyn HirDatabase,
+    pub(crate) owner: DefWithBodyId,
+    pub(crate) body: &'a Body,
+    pub(crate) resolver: Resolver,
+    /// Unification state (type variables and pending obligations).
+    table: unify::InferenceTable<'a>,
+    /// Trait environment of `owner` (empty if it has no generic def id).
+    trait_env: Arc<TraitEnvironment>,
+    pub(crate) result: InferenceResult,
+    /// The return type of the function being inferred, the closure or async block if we're
+    /// currently within one.
+    ///
+    /// We might consider using a nested inference context for checking
+    /// closures, but currently this is the only field that will change there,
+    /// so it doesn't make sense.
+    return_ty: Ty,
+    /// Tracks divergence of the code currently being inferred.
+    diverges: Diverges,
+    /// Stack of enclosing breakable constructs, innermost last.
+    breakables: Vec<BreakableContext>,
+}
+
+/// State for one enclosing breakable construct (e.g. a loop or labeled block).
+#[derive(Clone, Debug)]
+struct BreakableContext {
+    // NOTE(review): appears to record whether a `break` targets this context;
+    // set outside this excerpt — confirm against the body-inference code.
+    may_break: bool,
+    // Collects/coerces the types contributed by `break` values.
+    coerce: CoerceMany,
+    /// The construct's label, if any; matched by `find_breakable`.
+    label: Option<name::Name>,
+}
+
+/// Finds the breakable context matching `label`, searching innermost-first;
+/// with no label, the innermost context is targeted.
+fn find_breakable<'c>(
+    ctxs: &'c mut [BreakableContext],
+    label: Option<&name::Name>,
+) -> Option<&'c mut BreakableContext> {
+    if label.is_some() {
+        // Labeled: scan from the innermost context outwards for a match.
+        ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label)
+    } else {
+        // Unlabeled `break`/`continue` targets the innermost context.
+        ctxs.last_mut()
+    }
+}
+
+impl<'a> InferenceContext<'a> {
+    /// Creates a fresh context for inferring the body of `owner`.
+    fn new(
+        db: &'a dyn HirDatabase,
+        owner: DefWithBodyId,
+        body: &'a Body,
+        resolver: Resolver,
+    ) -> Self {
+        let krate = owner.module(db.upcast()).krate();
+        // Owners without a generic def id get an empty trait environment.
+        let trait_env = owner
+            .as_generic_def_id()
+            .map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
+        InferenceContext {
+            result: InferenceResult::default(),
+            table: unify::InferenceTable::new(db, trait_env.clone()),
+            trait_env,
+            return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature
+            db,
+            owner,
+            body,
+            resolver,
+            diverges: Diverges::Maybe,
+            breakables: Vec::new(),
+        }
+    }
+
+ fn resolve_all(self) -> InferenceResult {
+ let InferenceContext { mut table, mut result, .. } = self;
+
+ // FIXME resolve obligations as well (use Guidance if necessary)
+ table.resolve_obligations_as_possible();
+
+ // make sure diverging type variables are marked as such
+ table.propagate_diverging_flag();
+ for ty in result.type_of_expr.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for ty in result.type_of_pat.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for mismatch in result.type_mismatches.values_mut() {
+ mismatch.expected = table.resolve_completely(mismatch.expected.clone());
+ mismatch.actual = table.resolve_completely(mismatch.actual.clone());
+ }
+ for (_, subst) in result.method_resolutions.values_mut() {
+ *subst = table.resolve_completely(subst.clone());
+ }
+ for adjustment in result.expr_adjustments.values_mut().flatten() {
+ adjustment.target = table.resolve_completely(adjustment.target.clone());
+ }
+ for adjustment in result.pat_adjustments.values_mut().flatten() {
+ *adjustment = table.resolve_completely(adjustment.clone());
+ }
+ result
+ }
+
+ fn collect_const(&mut self, data: &ConstData) {
+ self.return_ty = self.make_ty(&data.type_ref);
+ }
+
+ fn collect_static(&mut self, data: &StaticData) {
+ self.return_ty = self.make_ty(&data.type_ref);
+ }
+
+ fn collect_fn(&mut self, func: FunctionId) {
+ let data = self.db.function_data(func);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Param);
+ let param_tys =
+ data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::<Vec<_>>();
+ for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) {
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ self.infer_pat(*pat, &ty, BindingMode::default());
+ }
+ let error_ty = &TypeRef::Error;
+ let return_ty = if data.has_async_kw() {
+ data.async_ret_type.as_deref().unwrap_or(error_ty)
+ } else {
+ &*data.ret_type
+ };
+ let return_ty = self.make_ty_with_mode(return_ty, ImplTraitLoweringMode::Opaque);
+ self.return_ty = return_ty;
+
+ if let Some(rpits) = self.db.return_type_impl_traits(func) {
+ // RPIT opaque types use substitution of their parent function.
+ let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
+ self.return_ty = fold_tys(
+ self.return_ty.clone(),
+ |ty, _| {
+ let opaque_ty_id = match ty.kind(Interner) {
+ TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id,
+ _ => return ty,
+ };
+ let idx = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
+ ImplTraitId::ReturnTypeImplTrait(_, idx) => idx,
+ _ => unreachable!(),
+ };
+ let bounds = (*rpits).map_ref(|rpits| {
+ rpits.impl_traits[idx as usize].bounds.map_ref(|it| it.into_iter())
+ });
+ let var = self.table.new_type_var();
+ let var_subst = Substitution::from1(Interner, var.clone());
+ for bound in bounds {
+ let predicate =
+ bound.map(|it| it.cloned()).substitute(Interner, &fn_placeholders);
+ let (var_predicate, binders) = predicate
+ .substitute(Interner, &var_subst)
+ .into_value_and_skipped_binders();
+ always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
+ self.push_obligation(var_predicate.cast(Interner));
+ }
+ var
+ },
+ DebruijnIndex::INNERMOST,
+ );
+ }
+ }
+
+ fn infer_body(&mut self) {
+ self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
+ }
+
+ fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
+ self.result.type_of_expr.insert(expr, ty);
+ }
+
+ fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
+ self.result.expr_adjustments.insert(expr, adjustments);
+ }
+
+ fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) {
+ self.result.method_resolutions.insert(expr, (func, subst));
+ }
+
+ fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
+ self.result.variant_resolutions.insert(id, variant);
+ }
+
+ fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
+ self.result.assoc_resolutions.insert(id, item);
+ }
+
+ fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
+ self.result.type_of_pat.insert(pat, ty);
+ }
+
+ fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
+ self.result.diagnostics.push(diagnostic);
+ }
+
+ fn make_ty_with_mode(
+ &mut self,
+ type_ref: &TypeRef,
+ impl_trait_mode: ImplTraitLoweringMode,
+ ) -> Ty {
+ // FIXME use right resolver for block
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+ .with_impl_trait_mode(impl_trait_mode);
+ let ty = ctx.lower_ty(type_ref);
+ let ty = self.insert_type_vars(ty);
+ self.normalize_associated_types_in(ty)
+ }
+
+ fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
+ self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
+ }
+
+ fn err_ty(&self) -> Ty {
+ self.result.standard_types.unknown.clone()
+ }
+
+ /// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
+ fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
+ let data = c.data(Interner);
+ match data.value {
+ ConstValue::Concrete(cc) => match cc.interned {
+ hir_def::type_ref::ConstScalar::Unknown => {
+ self.table.new_const_var(data.ty.clone())
+ }
+ _ => c,
+ },
+ _ => c,
+ }
+ }
+
+ /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
+ fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
+ match ty.kind(Interner) {
+ TyKind::Error => self.table.new_type_var(),
+ TyKind::InferenceVar(..) => {
+ let ty_resolved = self.resolve_ty_shallow(&ty);
+ if ty_resolved.is_unknown() {
+ self.table.new_type_var()
+ } else {
+ ty
+ }
+ }
+ _ => ty,
+ }
+ }
+
+ fn insert_type_vars(&mut self, ty: Ty) -> Ty {
+ fold_tys_and_consts(
+ ty,
+ |x, _| match x {
+ Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
+ Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+
+ fn resolve_obligations_as_possible(&mut self) {
+ self.table.resolve_obligations_as_possible();
+ }
+
+ fn push_obligation(&mut self, o: DomainGoal) {
+ self.table.register_obligation(o.cast(Interner));
+ }
+
+ fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+ self.table.unify(ty1, ty2)
+ }
+
+ /// Recurses through the given type, normalizing associated types mentioned
+ /// in it by replacing them by type variables and registering obligations to
+ /// resolve later. This should be done once for every type we get from some
+ /// type annotation (e.g. from a let type annotation, field type or function
+ /// call). `make_ty` handles this already, but e.g. for field types we need
+ /// to do it as well.
+ fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+ self.table.normalize_associated_types_in(ty)
+ }
+
+ fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
+ self.resolve_obligations_as_possible();
+ self.table.resolve_ty_shallow(ty)
+ }
+
+ fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
+ self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
+ }
+
+ fn resolve_associated_type_with_params(
+ &mut self,
+ inner_ty: Ty,
+ assoc_ty: Option<TypeAliasId>,
+ params: &[GenericArg],
+ ) -> Ty {
+ match assoc_ty {
+ Some(res_assoc_ty) => {
+ let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
+ hir_def::ItemContainerId::TraitId(trait_) => trait_,
+ _ => panic!("resolve_associated_type called with non-associated type"),
+ };
+ let ty = self.table.new_type_var();
+ let mut param_iter = params.iter().cloned();
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(inner_ty)
+ .fill(|_| param_iter.next().unwrap())
+ .build();
+ let alias_eq = AliasEq {
+ alias: AliasTy::Projection(ProjectionTy {
+ associated_ty_id: to_assoc_type_id(res_assoc_ty),
+ substitution: trait_ref.substitution.clone(),
+ }),
+ ty: ty.clone(),
+ };
+ self.push_obligation(trait_ref.cast(Interner));
+ self.push_obligation(alias_eq.cast(Interner));
+ ty
+ }
+ None => self.err_ty(),
+ }
+ }
+
+ fn resolve_variant(&mut self, path: Option<&Path>, value_ns: bool) -> (Ty, Option<VariantId>) {
+ let path = match path {
+ Some(path) => path,
+ None => return (self.err_ty(), None),
+ };
+ let resolver = &self.resolver;
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ // FIXME: this should resolve assoc items as well, see this example:
+ // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
+ let (resolution, unresolved) = if value_ns {
+ match resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
+ Some(ResolveValueResult::ValueNs(value)) => match value {
+ ValueNs::EnumVariantId(var) => {
+ let substs = ctx.substs_from_path(path, var.into(), true);
+ let ty = self.db.ty(var.parent.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ return (ty, Some(var.into()));
+ }
+ ValueNs::StructId(strukt) => {
+ let substs = ctx.substs_from_path(path, strukt.into(), true);
+ let ty = self.db.ty(strukt.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ return (ty, Some(strukt.into()));
+ }
+ _ => return (self.err_ty(), None),
+ },
+ Some(ResolveValueResult::Partial(typens, unresolved)) => (typens, Some(unresolved)),
+ None => return (self.err_ty(), None),
+ }
+ } else {
+ match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ Some(it) => it,
+ None => return (self.err_ty(), None),
+ }
+ };
+ return match resolution {
+ TypeNs::AdtId(AdtId::StructId(strukt)) => {
+ let substs = ctx.substs_from_path(path, strukt.into(), true);
+ let ty = self.db.ty(strukt.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
+ }
+ TypeNs::AdtId(AdtId::UnionId(u)) => {
+ let substs = ctx.substs_from_path(path, u.into(), true);
+ let ty = self.db.ty(u.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ forbid_unresolved_segments((ty, Some(u.into())), unresolved)
+ }
+ TypeNs::EnumVariantId(var) => {
+ let substs = ctx.substs_from_path(path, var.into(), true);
+ let ty = self.db.ty(var.parent.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+ forbid_unresolved_segments((ty, Some(var.into())), unresolved)
+ }
+ TypeNs::SelfType(impl_id) => {
+ let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let substs = generics.placeholder_subst(self.db);
+ let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
+ self.resolve_variant_on_alias(ty, unresolved, path)
+ }
+ TypeNs::TypeAliasId(it) => {
+ let ty = TyBuilder::def_ty(self.db, it.into())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.resolve_variant_on_alias(ty, unresolved, path)
+ }
+ TypeNs::AdtSelfType(_) => {
+ // FIXME this could happen in array size expressions, once we're checking them
+ (self.err_ty(), None)
+ }
+ TypeNs::GenericParam(_) => {
+ // FIXME potentially resolve assoc type
+ (self.err_ty(), None)
+ }
+ TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
+ // FIXME diagnostic
+ (self.err_ty(), None)
+ }
+ };
+
+ fn forbid_unresolved_segments(
+ result: (Ty, Option<VariantId>),
+ unresolved: Option<usize>,
+ ) -> (Ty, Option<VariantId>) {
+ if unresolved.is_none() {
+ result
+ } else {
+ // FIXME diagnostic
+ (TyKind::Error.intern(Interner), None)
+ }
+ }
+ }
+
+ fn resolve_variant_on_alias(
+ &mut self,
+ ty: Ty,
+ unresolved: Option<usize>,
+ path: &Path,
+ ) -> (Ty, Option<VariantId>) {
+ let remaining = unresolved.map(|x| path.segments().skip(x).len()).filter(|x| x > &0);
+ match remaining {
+ None => {
+ let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
+ AdtId::StructId(s) => Some(VariantId::StructId(s)),
+ AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
+ AdtId::EnumId(_) => {
+ // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
+ None
+ }
+ });
+ (ty, variant)
+ }
+ Some(1) => {
+ let segment = path.mod_path().segments().last().unwrap();
+ // this could be an enum variant or associated type
+ if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
+ let enum_data = self.db.enum_data(enum_id);
+ if let Some(local_id) = enum_data.variant(segment) {
+ let variant = EnumVariantId { parent: enum_id, local_id };
+ return (ty, Some(variant.into()));
+ }
+ }
+ // FIXME potentially resolve assoc type
+ (self.err_ty(), None)
+ }
+ Some(_) => {
+ // FIXME diagnostic
+ (self.err_ty(), None)
+ }
+ }
+ }
+
+ fn resolve_lang_item(&self, name: Name) -> Option<LangItemTarget> {
+ let krate = self.resolver.krate();
+ self.db.lang_item(krate, name.to_smol_str())
+ }
+
+ fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
+ let path = path![core::iter::IntoIterator];
+ let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Item])
+ }
+
+ fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
+ // FIXME resolve via lang_item once try v2 is stable
+ let path = path![core::ops::Try];
+ let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+ let trait_data = self.db.trait_data(trait_);
+ trait_data
+ // FIXME remove once try v2 is stable
+ .associated_type_by_name(&name![Ok])
+ .or_else(|| trait_data.associated_type_by_name(&name![Output]))
+ }
+
+ fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+
+ fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_lang_item(name![not])?.as_trait()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+
+ fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_lang_item(name![future_trait])?.as_trait()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+
+ fn resolve_boxed_box(&self) -> Option<AdtId> {
+ let struct_ = self.resolve_lang_item(name![owned_box])?.as_struct()?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_full(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeFull];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range(&self) -> Option<AdtId> {
+ let path = path![core::ops::Range];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_inclusive(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeInclusive];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_from(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeFrom];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_to(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeTo];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
+ let path = path![core::ops::RangeToInclusive];
+ let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+ Some(struct_.into())
+ }
+
+ fn resolve_ops_index(&self) -> Option<TraitId> {
+ self.resolve_lang_item(name![index])?.as_trait()
+ }
+
+ fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
+ let trait_ = self.resolve_ops_index()?;
+ self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+ }
+}
+
/// When inferring an expression, we propagate downward whatever type hint we
/// are able in the form of an `Expectation`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub(crate) enum Expectation {
    /// No expectation on the expression's type.
    None,
    /// The expression must have exactly this type.
    HasType(Ty),
    // Castable(Ty), // rustc has this, we currently just don't propagate an expectation for casts
    /// A weaker hint used for rvalues with unsized tails (see `rvalue_hint`):
    /// informs e.g. integer literals without pinning the exact type.
    RValueLikeUnsized(Ty),
}
+
+impl Expectation {
+ /// The expectation that the type of the expression needs to equal the given
+ /// type.
+ fn has_type(ty: Ty) -> Self {
+ if ty.is_unknown() {
+ // FIXME: get rid of this?
+ Expectation::None
+ } else {
+ Expectation::HasType(ty)
+ }
+ }
+
+ fn from_option(ty: Option<Ty>) -> Self {
+ ty.map_or(Expectation::None, Expectation::HasType)
+ }
+
+ /// The following explanation is copied straight from rustc:
+ /// Provides an expectation for an rvalue expression given an *optional*
+ /// hint, which is not required for type safety (the resulting type might
+ /// be checked higher up, as is the case with `&expr` and `box expr`), but
+ /// is useful in determining the concrete type.
+ ///
+ /// The primary use case is where the expected type is a fat pointer,
+ /// like `&[isize]`. For example, consider the following statement:
+ ///
+ /// let x: &[isize] = &[1, 2, 3];
+ ///
+ /// In this case, the expected type for the `&[1, 2, 3]` expression is
+ /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
+ /// expectation `ExpectHasType([isize])`, that would be too strong --
+ /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
+ /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
+ /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
+ /// which still is useful, because it informs integer literals and the like.
+ /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
+ /// for examples of where this comes up,.
+ fn rvalue_hint(table: &mut unify::InferenceTable<'_>, ty: Ty) -> Self {
+ // FIXME: do struct_tail_without_normalization
+ match table.resolve_ty_shallow(&ty).kind(Interner) {
+ TyKind::Slice(_) | TyKind::Str | TyKind::Dyn(_) => Expectation::RValueLikeUnsized(ty),
+ _ => Expectation::has_type(ty),
+ }
+ }
+
+ /// This expresses no expectation on the type.
+ fn none() -> Self {
+ Expectation::None
+ }
+
+ fn resolve(&self, table: &mut unify::InferenceTable<'_>) -> Expectation {
+ match self {
+ Expectation::None => Expectation::None,
+ Expectation::HasType(t) => Expectation::HasType(table.resolve_ty_shallow(t)),
+ Expectation::RValueLikeUnsized(t) => {
+ Expectation::RValueLikeUnsized(table.resolve_ty_shallow(t))
+ }
+ }
+ }
+
+ fn to_option(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
+ match self.resolve(table) {
+ Expectation::None => None,
+ Expectation::HasType(t) |
+ // Expectation::Castable(t) |
+ Expectation::RValueLikeUnsized(t) => Some(t),
+ }
+ }
+
+ fn only_has_type(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
+ match self {
+ Expectation::HasType(t) => Some(table.resolve_ty_shallow(t)),
+ // Expectation::Castable(_) |
+ Expectation::RValueLikeUnsized(_) | Expectation::None => None,
+ }
+ }
+
+ /// Comment copied from rustc:
+ /// Disregard "castable to" expectations because they
+ /// can lead us astray. Consider for example `if cond
+ /// {22} else {c} as u8` -- if we propagate the
+ /// "castable to u8" constraint to 22, it will pick the
+ /// type 22u8, which is overly constrained (c might not
+ /// be a u8). In effect, the problem is that the
+ /// "castable to" expectation is not the tightest thing
+ /// we can say, so we want to drop it in this case.
+ /// The tightest thing we can say is "must unify with
+ /// else branch". Note that in the case of a "has type"
+ /// constraint, this limitation does not hold.
+ ///
+ /// If the expected type is just a type variable, then don't use
+ /// an expected type. Otherwise, we might write parts of the type
+ /// when checking the 'then' block which are incompatible with the
+ /// 'else' branch.
+ fn adjust_for_branches(&self, table: &mut unify::InferenceTable<'_>) -> Expectation {
+ match self {
+ Expectation::HasType(ety) => {
+ let ety = table.resolve_ty_shallow(ety);
+ if !ety.is_ty_var() {
+ Expectation::HasType(ety)
+ } else {
+ Expectation::None
+ }
+ }
+ Expectation::RValueLikeUnsized(ety) => Expectation::RValueLikeUnsized(ety.clone()),
+ _ => Expectation::None,
+ }
+ }
+}
+
/// Whether the code path inferred so far always diverges (e.g. after `return`).
///
/// The derived `Ord` is significant: `Maybe < Always`, which the `BitAnd`
/// (min) and `BitOr` (max) impls below rely on.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Diverges {
    Maybe,
    Always,
}
+
+impl Diverges {
+ fn is_always(self) -> bool {
+ self == Diverges::Always
+ }
+}
+
+impl std::ops::BitAnd for Diverges {
+ type Output = Self;
+ fn bitand(self, other: Self) -> Self {
+ std::cmp::min(self, other)
+ }
+}
+
+impl std::ops::BitOr for Diverges {
+ type Output = Self;
+ fn bitor(self, other: Self) -> Self {
+ std::cmp::max(self, other)
+ }
+}
+
+impl std::ops::BitAndAssign for Diverges {
+ fn bitand_assign(&mut self, other: Self) {
+ *self = *self & other;
+ }
+}
+
+impl std::ops::BitOrAssign for Diverges {
+ fn bitor_assign(&mut self, other: Self) {
+ *self = *self | other;
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
new file mode 100644
index 000000000..3ead92909
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -0,0 +1,82 @@
+//! Inference of closure parameter types based on the closure's expected type.
+
+use chalk_ir::{cast::Cast, AliasEq, AliasTy, FnSubst, WhereClause};
+use hir_def::{expr::ExprId, HasModule};
+use smallvec::SmallVec;
+
+use crate::{
+ to_chalk_trait_id, utils, ChalkTraitId, DynTy, FnPointer, FnSig, Interner, Substitution, Ty,
+ TyExt, TyKind,
+};
+
+use super::{Expectation, InferenceContext};
+
impl InferenceContext<'_> {
    /// Uses the expected type of a closure expression to constrain the
    /// closure's signature: first by coercing the closure type to the
    /// expectation, then — if the expectation is a `dyn Fn*` type — by
    /// unifying the closure's signature with the one extracted from it.
    pub(super) fn deduce_closure_type_from_expectations(
        &mut self,
        closure_expr: ExprId,
        closure_ty: &Ty,
        sig_ty: &Ty,
        expectation: &Expectation,
    ) {
        let expected_ty = match expectation.to_option(&mut self.table) {
            Some(ty) => ty,
            None => return,
        };

        // Deduction from where-clauses in scope, as well as fn-pointer coercion are handled here.
        let _ = self.coerce(Some(closure_expr), closure_ty, &expected_ty);

        // Deduction based on the expected `dyn Fn` is done separately.
        if let TyKind::Dyn(dyn_ty) = expected_ty.kind(Interner) {
            if let Some(sig) = self.deduce_sig_from_dyn_ty(dyn_ty) {
                let expected_sig_ty = TyKind::Function(sig).intern(Interner);

                self.unify(sig_ty, &expected_sig_ty);
            }
        }
    }

    /// Extracts a function signature (`FnPointer`) from a `dyn Fn*(Args) -> Ret`
    /// trait object type, or returns `None` if `dyn_ty` is not one.
    fn deduce_sig_from_dyn_ty(&self, dyn_ty: &DynTy) -> Option<FnPointer> {
        // Search for a predicate like `<$self as FnX<Args>>::Output == Ret`

        let fn_traits: SmallVec<[ChalkTraitId; 3]> =
            utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate())
                .map(to_chalk_trait_id)
                .collect();

        // Substitute a dummy self type; we only care about the argument/return types.
        let self_ty = TyKind::Error.intern(Interner);
        let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
        for bound in bounds.iter(Interner) {
            // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer`
            if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
                bound.skip_binders()
            {
                let assoc_data = self.db.associated_ty_data(projection.associated_ty_id);
                if !fn_traits.contains(&assoc_data.trait_id) {
                    return None;
                }

                // Skip `Self`, get the type argument.
                let arg = projection.substitution.as_slice(Interner).get(1)?;
                if let Some(subst) = arg.ty(Interner)?.as_tuple() {
                    // `Fn*` argument lists are encoded as a tuple; unpack it
                    // into individual parameter types, then append the return type.
                    let generic_args = subst.as_slice(Interner);
                    let mut sig_tys = Vec::new();
                    for arg in generic_args {
                        sig_tys.push(arg.ty(Interner)?.clone());
                    }
                    sig_tys.push(ty.clone());

                    cov_mark::hit!(dyn_fn_param_informs_call_site_closure_signature);
                    return Some(FnPointer {
                        num_binders: bound.len(Interner),
                        sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
                        substitution: FnSubst(Substitution::from_iter(Interner, sig_tys)),
                    });
                }
            }
        }

        None
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
new file mode 100644
index 000000000..f54440bf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -0,0 +1,673 @@
+//! Coercion logic. Coercions are certain type conversions that can implicitly
+//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
+//! like going from `&Vec<T>` to `&[T]`.
+//!
+//! See <https://doc.rust-lang.org/nomicon/coercions.html> and
+//! `librustc_typeck/check/coercion.rs`.
+
+use std::{iter, sync::Arc};
+
+use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind};
+use hir_def::{expr::ExprId, lang_item::LangItemTarget};
+use stdx::always;
+use syntax::SmolStr;
+
+use crate::{
+ autoderef::{Autoderef, AutoderefKind},
+ db::HirDatabase,
+ infer::{
+ Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
+ TypeError, TypeMismatch,
+ },
+ static_lifetime, Canonical, DomainGoal, FnPointer, FnSig, Guidance, InEnvironment, Interner,
+ Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::unify::InferenceTable;
+
+pub(crate) type CoerceResult = Result<InferOk<(Vec<Adjustment>, Ty)>, TypeError>;
+
+/// Do not require any adjustments, i.e. coerce `x -> x`.
+fn identity(_: Ty) -> Vec<Adjustment> {
+ vec![]
+}
+
+fn simple(kind: Adjust) -> impl FnOnce(Ty) -> Vec<Adjustment> {
+ move |target| vec![Adjustment { kind, target }]
+}
+
+/// This always returns `Ok(...)`.
+fn success(
+ adj: Vec<Adjustment>,
+ target: Ty,
+ goals: Vec<InEnvironment<Goal<Interner>>>,
+) -> CoerceResult {
+ Ok(InferOk { goals, value: (adj, target) })
+}
+
+#[derive(Clone, Debug)]
+pub(super) struct CoerceMany {
+ expected_ty: Ty,
+}
+
+impl CoerceMany {
+ pub(super) fn new(expected: Ty) -> Self {
+ CoerceMany { expected_ty: expected }
+ }
+
+ /// Merge two types from different branches, with possible coercion.
+ ///
+ /// Mostly this means trying to coerce one to the other, but
+ /// - if we have two function types for different functions or closures, we need to
+ /// coerce both to function pointers;
+ /// - if we were concerned with lifetime subtyping, we'd need to look for a
+ /// least upper bound.
+ pub(super) fn coerce(
+ &mut self,
+ ctx: &mut InferenceContext<'_>,
+ expr: Option<ExprId>,
+ expr_ty: &Ty,
+ ) {
+ let expr_ty = ctx.resolve_ty_shallow(expr_ty);
+ self.expected_ty = ctx.resolve_ty_shallow(&self.expected_ty);
+
+ // Special case: two function types. Try to coerce both to
+ // pointers to have a chance at getting a match. See
+ // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
+ let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
+ (TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
+ // FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
+ // we should be coercing the closure to a fn pointer of the safety of the FnDef
+ cov_mark::hit!(coerce_fn_reification);
+ let sig =
+ self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
+ Some(sig)
+ }
+ _ => None,
+ };
+ if let Some(sig) = sig {
+ let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
+ let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
+ let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
+ if let (Ok(result1), Ok(result2)) = (result1, result2) {
+ ctx.table.register_infer_ok(result1);
+ ctx.table.register_infer_ok(result2);
+ return self.expected_ty = target_ty;
+ }
+ }
+
+ // It might not seem like it, but order is important here: If the expected
+ // type is a type variable and the new one is `!`, trying it the other
+ // way around first would mean we make the type variable `!`, instead of
+ // just marking it as possibly diverging.
+ if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
+ /* self.expected_ty is already correct */
+ } else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
+ self.expected_ty = expr_ty;
+ } else {
+ if let Some(id) = expr {
+ ctx.result.type_mismatches.insert(
+ id.into(),
+ TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
+ );
+ }
+ cov_mark::hit!(coerce_merge_fail_fallback);
+ /* self.expected_ty is already correct */
+ }
+ }
+
+ pub(super) fn complete(self) -> Ty {
+ self.expected_ty
+ }
+}
+
+pub fn could_coerce(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ coerce(db, env, tys).is_ok()
+}
+
/// Coerces the first canonicalized type into the second using a fresh,
/// throwaway inference table, returning the adjustment steps and the result
/// type with unresolved inference vars mapped back to bound variables.
pub(crate) fn coerce(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    tys: &Canonical<(Ty, Ty)>,
) -> Result<(Vec<Adjustment>, Ty), TypeError> {
    let mut table = InferenceTable::new(db, env);
    // Instantiate the canonical binders with fresh inference variables.
    let vars = table.fresh_subst(tys.binders.as_slice(Interner));
    let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
    let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
    let (adjustments, ty) = table.coerce(&ty1_with_vars, &ty2_with_vars)?;
    // default any type vars that weren't unified back to their original bound vars
    // (kind of hacky)
    let find_var = |iv| {
        vars.iter(Interner).position(|v| match v.interned() {
            chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
            chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
            chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
        } == Some(iv))
    };
    // For each still-unresolved var, rebind it to the bound var it came from;
    // otherwise fall through to the provided default.
    let fallback = |iv, kind, default, binder| match kind {
        chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
            .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
        chalk_ir::VariableKind::Lifetime => find_var(iv)
            .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
        chalk_ir::VariableKind::Const(ty) => find_var(iv)
            .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
    };
    // FIXME also map the types in the adjustments
    Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
}
+
+impl<'a> InferenceContext<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(super) fn coerce(
+ &mut self,
+ expr: Option<ExprId>,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<Ty, TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty)?;
+ if let Some(expr) = expr {
+ self.write_expr_adj(expr, adjustments);
+ }
+ Ok(ty)
+ }
+}
+
+impl<'a> InferenceTable<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(crate) fn coerce(
+ &mut self,
+ from_ty: &Ty,
+ to_ty: &Ty,
+ ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+ let from_ty = self.resolve_ty_shallow(from_ty);
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ match self.coerce_inner(from_ty, &to_ty) {
+ Ok(InferOk { value: (adjustments, ty), goals }) => {
+ self.register_infer_ok(InferOk { value: (), goals });
+ Ok((adjustments, ty))
+ }
+ Err(e) => {
+ // FIXME deal with error
+ Err(e)
+ }
+ }
+ }
+
    /// Core coercion dispatch: decides which specialized rule applies based on
    /// the source and target type kinds, falling back to plain unification.
    /// Produced goals are returned to the caller, not committed here.
    fn coerce_inner(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
        if from_ty.is_never() {
            // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound
            // type variable, we want `?T` to fallback to `!` if not
            // otherwise constrained. An example where this arises:
            //
            //   let _: Option<?T> = Some({ return; });
            //
            // here, we would coerce from `!` to `?T`.
            if let TyKind::InferenceVar(tv, TyVariableKind::General) = to_ty.kind(Interner) {
                self.set_diverging(*tv, true);
            }
            return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]);
        }

        // Consider coercing the subtype to a DST
        if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
            return Ok(ret);
        }

        // Examine the supertype and consider auto-borrowing.
        match to_ty.kind(Interner) {
            TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
            TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt),
            _ => {}
        }

        match from_ty.kind(Interner) {
            TyKind::FnDef(..) => {
                // Function items are coercible to any closure
                // type; function pointers are not (that would
                // require double indirection).
                // Additionally, we permit coercion of function
                // items to drop the unsafe qualifier.
                self.coerce_from_fn_item(from_ty, to_ty)
            }
            TyKind::Function(from_fn_ptr) => {
                // We permit coercion of fn pointers to drop the
                // unsafe qualifier.
                self.coerce_from_fn_pointer(from_ty.clone(), from_fn_ptr, to_ty)
            }
            TyKind::Closure(_, from_substs) => {
                // Non-capturing closures are coercible to
                // function pointers or unsafe function pointers.
                // It cannot convert closures that require unsafe.
                self.coerce_closure_to_fn(from_ty.clone(), from_substs, to_ty)
            }
            _ => {
                // Otherwise, just use unification rules.
                self.unify_and(&from_ty, to_ty, identity)
            }
        }
    }
+
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and<F>(&mut self, t1: &Ty, t2: &Ty, f: F) -> CoerceResult
+ where
+ F: FnOnce(Ty) -> Vec<Adjustment>,
+ {
+ self.try_unify(t1, t2)
+ .and_then(|InferOk { goals, .. }| success(f(t1.clone()), t1.clone(), goals))
+ }
+
    /// Coerces `from_ty` into the raw-pointer target `to_ty` (`*const`/`*mut`):
    /// handles `&T -> *T` (deref + raw-pointer borrow) and the
    /// `*mut T -> *const T` mutability weakening.
    fn coerce_ptr(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
        let (is_ref, from_mt, from_inner) = match from_ty.kind(Interner) {
            TyKind::Ref(mt, _, ty) => (true, mt, ty),
            TyKind::Raw(mt, ty) => (false, mt, ty),
            // Not a pointer-like source: fall back to plain unification.
            _ => return self.unify_and(&from_ty, to_ty, identity),
        };

        coerce_mutabilities(*from_mt, to_mt)?;

        // Check that the types which they point at are compatible.
        let from_raw = TyKind::Raw(to_mt, from_inner.clone()).intern(Interner);

        // Although references and unsafe ptrs have the same
        // representation, we still register an Adjust::DerefRef so that
        // regionck knows that the region for `a` must be valid here.
        if is_ref {
            self.unify_and(&from_raw, to_ty, |target| {
                vec![
                    Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
                    Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)), target },
                ]
            })
        } else if *from_mt != to_mt {
            self.unify_and(
                &from_raw,
                to_ty,
                simple(Adjust::Pointer(PointerCast::MutToConstPointer)),
            )
        } else {
            self.unify_and(&from_raw, to_ty, identity)
        }
    }
+
    /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
    /// To match `A` with `B`, autoderef will be performed,
    /// calling `deref`/`deref_mut` where necessary.
    fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
        let from_mt = match from_ty.kind(Interner) {
            &TyKind::Ref(mt, _, _) => {
                coerce_mutabilities(mt, to_mt)?;
                mt
            }
            // Non-reference sources get no special treatment here.
            _ => return self.unify_and(&from_ty, to_ty, identity),
        };

        // NOTE: this code is mostly copied and adapted from rustc, and
        // currently more complicated than necessary, carrying errors around
        // etc.. This complication will become necessary when we actually track
        // details of coercion errors though, so I think it's useful to leave
        // the structure like it is.

        let snapshot = self.snapshot();

        let mut autoderef = Autoderef::new(self, from_ty.clone());
        let mut first_error = None;
        let mut found = None;

        while let Some((referent_ty, autoderefs)) = autoderef.next() {
            if autoderefs == 0 {
                // Don't let this pass, otherwise it would cause
                // &T to autoref to &&T.
                continue;
            }

            // At this point, we have deref'd `a` to `referent_ty`. So
            // imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
            // In the autoderef loop for `&'a mut Vec<T>`, we would get
            // three callbacks:
            //
            // - `&'a mut Vec<T>` -- 0 derefs, just ignore it
            // - `Vec<T>` -- 1 deref
            // - `[T]` -- 2 deref
            //
            // At each point after the first callback, we want to
            // check to see whether this would match out target type
            // (`&'b mut [T]`) if we autoref'd it. We can't just
            // compare the referent types, though, because we still
            // have to consider the mutability. E.g., in the case
            // we've been considering, we have an `&mut` reference, so
            // the `T` in `[T]` needs to be unified with equality.
            //
            // Therefore, we construct reference types reflecting what
            // the types will be after we do the final auto-ref and
            // compare those. Note that this means we use the target
            // mutability [1], since it may be that we are coercing
            // from `&mut T` to `&U`.
            let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc
            let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
            match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
                Ok(result) => {
                    found = Some(result.map(|()| derefd_from_ty));
                    break;
                }
                Err(err) => {
                    if first_error.is_none() {
                        first_error = Some(err);
                    }
                }
            }
        }

        // Extract type or return an error. We return the first error
        // we got, which should be from relating the "base" type
        // (e.g., in example above, the failure from relating `Vec<T>`
        // to the target type), since that should be the least
        // confusing.
        let InferOk { value: ty, goals } = match found {
            Some(d) => d,
            None => {
                // No deref level unified with the target: undo any inference
                // side effects and report the earliest failure.
                self.rollback_to(snapshot);
                let err = first_error.expect("coerce_borrowed_pointer had no error");
                return Err(err);
            }
        };
        if ty == from_ty && from_mt == Mutability::Not && autoderef.step_count() == 1 {
            // As a special case, if we would produce `&'a *x`, that's
            // a total no-op. We end up with the type `&'a T` just as
            // we started with. In that case, just skip it
            // altogether. This is just an optimization.
            //
            // Note that for `&mut`, we DO want to reborrow --
            // otherwise, this would be a move, which might be an
            // error. For example `foo(self.x)` where `self` and
            // `self.x` both have `&mut `type would be a move of
            // `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
            // which is a borrow.
            always!(to_mt == Mutability::Not); // can only coerce &T -> &U
            return success(vec![], ty, goals);
        }

        // Deref steps followed by a final (re)borrow at the target mutability.
        let mut adjustments = auto_deref_adjust_steps(&autoderef);
        adjustments
            .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() });

        success(adjustments, ty, goals)
    }
+
    /// Attempts to coerce from the type of a Rust function item into a function pointer.
    fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult {
        match to_ty.kind(Interner) {
            TyKind::Function(_) => {
                let from_sig = from_ty.callable_sig(self.db).expect("FnDef had no sig");

                // FIXME check ABI: Intrinsics are not coercible to function pointers
                // FIXME Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396)

                // FIXME rustc normalizes assoc types in the sig here, not sure if necessary

                let from_sig = from_sig.to_fn_ptr();
                let from_fn_pointer = TyKind::Function(from_sig.clone()).intern(Interner);
                // Reify to a fn pointer; when the target is additionally an
                // `unsafe fn`, a second `UnsafeFnPointer` cast step is added.
                let ok = self.coerce_from_safe_fn(
                    from_fn_pointer.clone(),
                    &from_sig,
                    to_ty,
                    |unsafe_ty| {
                        vec![
                            Adjustment {
                                kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
                                target: from_fn_pointer,
                            },
                            Adjustment {
                                kind: Adjust::Pointer(PointerCast::UnsafeFnPointer),
                                target: unsafe_ty,
                            },
                        ]
                    },
                    simple(Adjust::Pointer(PointerCast::ReifyFnPointer)),
                )?;

                Ok(ok)
            }
            _ => self.unify_and(&from_ty, to_ty, identity),
        }
    }
+
+ fn coerce_from_fn_pointer(
+ &mut self,
+ from_ty: Ty,
+ from_f: &FnPointer,
+ to_ty: &Ty,
+ ) -> CoerceResult {
+ self.coerce_from_safe_fn(
+ from_ty,
+ from_f,
+ to_ty,
+ simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)),
+ identity,
+ )
+ }
+
    /// Shared helper for fn-item / fn-pointer coercions: if the target is an
    /// `unsafe fn` pointer and the source signature is safe, unify against the
    /// unsafe-ified source type and build adjustments with `to_unsafe`;
    /// otherwise unify the types directly and use `normal`.
    fn coerce_from_safe_fn<F, G>(
        &mut self,
        from_ty: Ty,
        from_fn_ptr: &FnPointer,
        to_ty: &Ty,
        to_unsafe: F,
        normal: G,
    ) -> CoerceResult
    where
        F: FnOnce(Ty) -> Vec<Adjustment>,
        G: FnOnce(Ty) -> Vec<Adjustment>,
    {
        if let TyKind::Function(to_fn_ptr) = to_ty.kind(Interner) {
            if let (chalk_ir::Safety::Safe, chalk_ir::Safety::Unsafe) =
                (from_fn_ptr.sig.safety, to_fn_ptr.sig.safety)
            {
                let from_unsafe =
                    TyKind::Function(safe_to_unsafe_fn_ty(from_fn_ptr.clone())).intern(Interner);
                return self.unify_and(&from_unsafe, to_ty, to_unsafe);
            }
        }
        self.unify_and(&from_ty, to_ty, normal)
    }
+
    /// Attempts to coerce from the type of a non-capturing closure into a
    /// function pointer.
    fn coerce_closure_to_fn(
        &mut self,
        from_ty: Ty,
        from_substs: &Substitution,
        to_ty: &Ty,
    ) -> CoerceResult {
        match to_ty.kind(Interner) {
            // if from_substs is non-capturing (FIXME)
            TyKind::Function(fn_ty) => {
                // We coerce the closure, which has fn type
                //     `extern "rust-call" fn((arg0,arg1,...)) -> _`
                // to
                //     `fn(arg0,arg1,...) -> _`
                // or
                //     `unsafe fn(arg0,arg1,...) -> _`
                // The target's safety is adopted for the resulting pointer type.
                let safety = fn_ty.sig.safety;
                let pointer_ty = coerce_closure_fn_ty(from_substs, safety);
                self.unify_and(
                    &pointer_ty,
                    to_ty,
                    simple(Adjust::Pointer(PointerCast::ClosureFnPointer(safety))),
                )
            }
            _ => self.unify_and(&from_ty, to_ty, identity),
        }
    }
+
    /// Coerce a type using `from_ty: CoerceUnsized<ty_ty>`
    ///
    /// See: <https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html>
    fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> CoerceResult {
        // These 'if' statements require some explanation.
        // The `CoerceUnsized` trait is special - it is only
        // possible to write `impl CoerceUnsized<B> for A` where
        // A and B have 'matching' fields. This rules out the following
        // two types of blanket impls:
        //
        // `impl<T> CoerceUnsized<T> for SomeType`
        // `impl<T> CoerceUnsized<SomeType> for T`
        //
        // Both of these trigger a special `CoerceUnsized`-related error (E0376)
        //
        // We can take advantage of this fact to avoid performing unnecessary work.
        // If either `source` or `target` is a type variable, then any applicable impl
        // would need to be generic over the self-type (`impl<T> CoerceUnsized<SomeType> for T`)
        // or generic over the `CoerceUnsized` type parameter (`impl<T> CoerceUnsized<T> for
        // SomeType`).
        //
        // However, these are exactly the kinds of impls which are forbidden by
        // the compiler! Therefore, we can be sure that coercion will always fail
        // when either the source or target type is a type variable. This allows us
        // to skip performing any trait selection, and immediately bail out.
        if from_ty.is_ty_var() {
            return Err(TypeError);
        }
        if to_ty.is_ty_var() {
            return Err(TypeError);
        }

        // Handle reborrows before trying to solve `Source: CoerceUnsized<Target>`.
        let reborrow = match (from_ty.kind(Interner), to_ty.kind(Interner)) {
            (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
                coerce_mutabilities(*from_mt, to_mt)?;

                let lt = static_lifetime();
                Some((
                    Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
                    Adjustment {
                        kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)),
                        target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
                    },
                ))
            }
            (TyKind::Ref(from_mt, _, from_inner), &TyKind::Raw(to_mt, _)) => {
                coerce_mutabilities(*from_mt, to_mt)?;

                Some((
                    Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
                    Adjustment {
                        kind: Adjust::Borrow(AutoBorrow::RawPtr(to_mt)),
                        target: TyKind::Raw(to_mt, from_inner.clone()).intern(Interner),
                    },
                ))
            }
            _ => None,
        };
        // When reborrowing, the `CoerceUnsized` obligation is stated on the
        // reborrowed type, not the original source type.
        let coerce_from =
            reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());

        let krate = self.trait_env.krate;
        let coerce_unsized_trait =
            match self.db.lang_item(krate, SmolStr::new_inline("coerce_unsized")) {
                Some(LangItemTarget::TraitId(trait_)) => trait_,
                _ => return Err(TypeError),
            };

        let coerce_unsized_tref = {
            let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait);
            if b.remaining() != 2 {
                // The CoerceUnsized trait should have two generic params: Self and T.
                return Err(TypeError);
            }
            b.push(coerce_from).push(to_ty.clone()).build()
        };

        let goal: InEnvironment<DomainGoal> =
            InEnvironment::new(&self.trait_env.env, coerce_unsized_tref.cast(Interner));

        let canonicalized = self.canonicalize(goal);

        // FIXME: rustc's coerce_unsized is more specialized -- it only tries to
        // solve `CoerceUnsized` and `Unsize` goals at this point and leaves the
        // rest for later. Also, there's some logic about sized type variables.
        // Need to find out in what cases this is necessary
        let solution = self
            .db
            .trait_solve(krate, canonicalized.value.clone().cast(Interner))
            .ok_or(TypeError)?;

        match solution {
            Solution::Unique(v) => {
                canonicalized.apply_solution(
                    self,
                    Canonical {
                        binders: v.binders,
                        // FIXME handle constraints
                        value: v.value.subst,
                    },
                );
            }
            Solution::Ambig(Guidance::Definite(subst)) => {
                // FIXME need to record an obligation here
                canonicalized.apply_solution(self, subst)
            }
            // FIXME actually we maybe should also accept unknown guidance here
            _ => return Err(TypeError),
        };
        let unsize =
            Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: to_ty.clone() };
        let adjustments = match reborrow {
            None => vec![unsize],
            Some((deref, autoref)) => vec![deref, autoref, unsize],
        };
        success(adjustments, to_ty.clone(), vec![])
    }
+}
+
+fn coerce_closure_fn_ty(closure_substs: &Substitution, safety: chalk_ir::Safety) -> Ty {
+ let closure_sig = closure_substs.at(Interner, 0).assert_ty_ref(Interner).clone();
+ match closure_sig.kind(Interner) {
+ TyKind::Function(fn_ty) => TyKind::Function(FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety, ..fn_ty.sig },
+ substitution: fn_ty.substitution.clone(),
+ })
+ .intern(Interner),
+ _ => TyKind::Error.intern(Interner),
+ }
+}
+
+fn safe_to_unsafe_fn_ty(fn_ty: FnPointer) -> FnPointer {
+ FnPointer {
+ num_binders: fn_ty.num_binders,
+ sig: FnSig { safety: chalk_ir::Safety::Unsafe, ..fn_ty.sig },
+ substitution: fn_ty.substitution,
+ }
+}
+
+fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError> {
+ match (from, to) {
+ (Mutability::Mut, Mutability::Mut | Mutability::Not)
+ | (Mutability::Not, Mutability::Not) => Ok(()),
+ (Mutability::Not, Mutability::Mut) => Err(TypeError),
+ }
+}
+
+pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adjustment> {
+ let steps = autoderef.steps();
+ let targets =
+ steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty()));
+ steps
+ .iter()
+ .map(|(kind, _source)| match kind {
+ // We do not know what kind of deref we require at this point yet
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Builtin => None,
+ })
+ .zip(targets)
+ .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target })
+ .collect()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
new file mode 100644
index 000000000..d164e64a8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -0,0 +1,1527 @@
+//! Type inference for expressions.
+
+use std::{
+ collections::hash_map::Entry,
+ iter::{repeat, repeat_with},
+ mem,
+};
+
+use chalk_ir::{
+ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
+};
+use hir_def::{
+ expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
+ generics::TypeOrConstParamData,
+ path::{GenericArg, GenericArgs},
+ resolver::resolver_for_expr,
+ ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup,
+};
+use hir_expand::name::{name, Name};
+use stdx::always;
+use syntax::ast::RangeOp;
+
+use crate::{
+ autoderef::{self, Autoderef},
+ consteval,
+ infer::coerce::CoerceMany,
+ lower::{
+ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
+ },
+ mapping::{from_chalk, ToChalk},
+ method_resolution::{self, VisibleFromModule},
+ primitive::{self, UintTy},
+ static_lifetime, to_chalk_trait_id,
+ utils::{generics, Generics},
+ AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
+ Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::{
+ coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+};
+
+impl<'a> InferenceContext<'a> {
    /// Infers the type of `tgt_expr` and unifies it with the expectation's
    /// type (if it has one), recording a type mismatch when unification fails.
    /// Unlike `infer_expr_coerce`, no coercion is attempted here, and the
    /// inferred type (not the expected one) is returned even on mismatch.
    pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
        let ty = self.infer_expr_inner(tgt_expr, expected);
        if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
            let could_unify = self.unify(&ty, &expected_ty);
            if !could_unify {
                self.result.type_mismatches.insert(
                    tgt_expr.into(),
                    TypeMismatch { expected: expected_ty, actual: ty.clone() },
                );
            }
        }
        ty
    }
+
    /// Infer type of expression with possibly implicit coerce to the expected type.
    /// Return the type after possible coercion.
    ///
    /// On coercion failure a mismatch is recorded and the *expected* type is
    /// returned, so downstream inference proceeds with the expectation.
    pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
        let ty = self.infer_expr_inner(expr, expected);
        if let Some(target) = expected.only_has_type(&mut self.table) {
            match self.coerce(Some(expr), &ty, &target) {
                Ok(res) => res,
                Err(_) => {
                    self.result.type_mismatches.insert(
                        expr.into(),
                        TypeMismatch { expected: target.clone(), actual: ty.clone() },
                    );
                    target
                }
            }
        } else {
            ty
        }
    }
+
+ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ self.db.unwind_if_cancelled();
+
+ let ty = match &self.body[tgt_expr] {
+ Expr::Missing => self.err_ty(),
+ &Expr::If { condition, then_branch, else_branch } => {
+ self.infer_expr(
+ condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+
+ let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut both_arms_diverge = Diverges::Always;
+
+ let result_ty = self.table.new_type_var();
+ let then_ty = self.infer_expr_inner(then_branch, expected);
+ both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut coerce = CoerceMany::new(result_ty);
+ coerce.coerce(self, Some(then_branch), &then_ty);
+ let else_ty = match else_branch {
+ Some(else_branch) => self.infer_expr_inner(else_branch, expected),
+ None => TyBuilder::unit(),
+ };
+ both_arms_diverge &= self.diverges;
+ // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
+ coerce.coerce(self, else_branch, &else_ty);
+
+ self.diverges = condition_diverges | both_arms_diverge;
+
+ coerce.complete()
+ }
+ &Expr::Let { pat, expr } => {
+ let input_ty = self.infer_expr(expr, &Expectation::none());
+ self.infer_pat(pat, &input_ty, BindingMode::default());
+ TyKind::Scalar(Scalar::Bool).intern(Interner)
+ }
+ Expr::Block { statements, tail, label, id: _ } => {
+ let old_resolver = mem::replace(
+ &mut self.resolver,
+ resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
+ );
+ let ty = match label {
+ Some(_) => {
+ let break_ty = self.table.new_type_var();
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(break_ty.clone()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let ty = self.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ &Expectation::has_type(break_ty),
+ );
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+ if ctxt.may_break {
+ ctxt.coerce.complete()
+ } else {
+ ty
+ }
+ }
+ None => self.infer_block(tgt_expr, statements, *tail, expected),
+ };
+ self.resolver = old_resolver;
+ ty
+ }
+ Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected),
+ Expr::TryBlock { body } => {
+ let _inner = self.infer_expr(*body, expected);
+ // FIXME should be std::result::Result<{inner}, _>
+ self.err_ty()
+ }
+ Expr::Async { body } => {
+ let ret_ty = self.table.new_type_var();
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ let inner_ty = self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ // Use the first type parameter as the output type of future.
+ // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
+ let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
+ .intern(Interner)
+ }
+ Expr::Loop { body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.table.new_type_var()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+
+ if ctxt.may_break {
+ self.diverges = Diverges::Maybe;
+ ctxt.coerce.complete()
+ } else {
+ TyKind::Never.intern(Interner)
+ }
+ }
+ Expr::While { condition, body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(
+ *condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::For { iterable, body, pat, label } => {
+ let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
+
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let pat_ty =
+ self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
+
+ self.infer_pat(*pat, &pat_ty, BindingMode::default());
+
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::Closure { body, args, ret_type, arg_types } => {
+ assert_eq!(args.len(), arg_types.len());
+
+ let mut sig_tys = Vec::new();
+
+ // collect explicitly written argument types
+ for arg_type in arg_types.iter() {
+ let arg_ty = match arg_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(arg_ty);
+ }
+
+ // add return type
+ let ret_ty = match ret_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(ret_ty.clone());
+ let sig_ty = TyKind::Function(FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(
+ Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
+ ),
+ })
+ .intern(Interner);
+ let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
+ let closure_ty =
+ TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
+ .intern(Interner);
+
+ // Eagerly try to relate the closure type with the expected
+ // type, otherwise we often won't have enough information to
+ // infer the body.
+ self.deduce_closure_type_from_expectations(
+ tgt_expr,
+ &closure_ty,
+ &sig_ty,
+ expected,
+ );
+
+ // Now go through the argument patterns
+ for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
+ self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
+ }
+
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ closure_ty
+ }
+ Expr::Call { callee, args, .. } => {
+ let callee_ty = self.infer_expr(*callee, &Expectation::none());
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+ let mut res = None;
+ let mut derefed_callee = callee_ty.clone();
+ // manual loop to be able to access `derefs.table`
+ while let Some((callee_deref_ty, _)) = derefs.next() {
+ res = derefs.table.callable_sig(&callee_deref_ty, args.len());
+ if res.is_some() {
+ derefed_callee = callee_deref_ty;
+ break;
+ }
+ }
+ // if the function is unresolved, we use is_varargs=true to
+ // suppress the arg count diagnostic here
+ let is_varargs =
+ derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
+ || res.is_none();
+ let (param_tys, ret_ty) = match res {
+ Some(res) => {
+ let adjustments = auto_deref_adjust_steps(&derefs);
+ self.write_expr_adj(*callee, adjustments);
+ res
+ }
+ None => (Vec::new(), self.err_ty()), // FIXME diagnostic
+ };
+ let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
+ self.register_obligations_for_call(&callee_ty);
+
+ let expected_inputs = self.expected_inputs_for_expected_output(
+ expected,
+ ret_ty.clone(),
+ param_tys.clone(),
+ );
+
+ self.check_call_arguments(
+ tgt_expr,
+ args,
+ &expected_inputs,
+ &param_tys,
+ &indices_to_skip,
+ is_varargs,
+ );
+ self.normalize_associated_types_in(ret_ty)
+ }
+ Expr::MethodCall { receiver, args, method_name, generic_args } => self
+ .infer_method_call(
+ tgt_expr,
+ *receiver,
+ args,
+ method_name,
+ generic_args.as_deref(),
+ expected,
+ ),
+ Expr::Match { expr, arms } => {
+ let input_ty = self.infer_expr(*expr, &Expectation::none());
+
+ let expected = expected.adjust_for_branches(&mut self.table);
+
+ let result_ty = if arms.is_empty() {
+ TyKind::Never.intern(Interner)
+ } else {
+ match &expected {
+ Expectation::HasType(ty) => ty.clone(),
+ _ => self.table.new_type_var(),
+ }
+ };
+ let mut coerce = CoerceMany::new(result_ty);
+
+ let matchee_diverges = self.diverges;
+ let mut all_arms_diverge = Diverges::Always;
+
+ for arm in arms.iter() {
+ self.diverges = Diverges::Maybe;
+ let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
+ if let Some(guard_expr) = arm.guard {
+ self.infer_expr(
+ guard_expr,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ }
+
+ let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+ all_arms_diverge &= self.diverges;
+ coerce.coerce(self, Some(arm.expr), &arm_ty);
+ }
+
+ self.diverges = matchee_diverges | all_arms_diverge;
+
+ coerce.complete()
+ }
+ Expr::Path(p) => {
+ // FIXME this could be more efficient...
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
+ self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Expr::Continue { .. } => TyKind::Never.intern(Interner),
+ Expr::Break { expr, label } => {
+ let mut coerce = match find_breakable(&mut self.breakables, label.as_ref()) {
+ Some(ctxt) => {
+ // avoiding the borrowck
+ mem::replace(
+ &mut ctxt.coerce,
+ CoerceMany::new(self.result.standard_types.unknown.clone()),
+ )
+ }
+ None => CoerceMany::new(self.result.standard_types.unknown.clone()),
+ };
+
+ let val_ty = if let Some(expr) = *expr {
+ self.infer_expr(expr, &Expectation::none())
+ } else {
+ TyBuilder::unit()
+ };
+
+ // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
+ coerce.coerce(self, *expr, &val_ty);
+
+ if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
+ ctxt.coerce = coerce;
+ ctxt.may_break = true;
+ } else {
+ self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+ expr: tgt_expr,
+ });
+ };
+
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Return { expr } => {
+ if let Some(expr) = expr {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
+ } else {
+ let unit = TyBuilder::unit();
+ let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Yield { expr } => {
+ // FIXME: track yield type for coercion
+ if let Some(expr) = expr {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::RecordLit { path, fields, spread, .. } => {
+ let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
+ if let Some(variant) = def_id {
+ self.write_variant_resolution(tgt_expr.into(), variant);
+ }
+
+ if let Some(t) = expected.only_has_type(&mut self.table) {
+ self.unify(&ty, &t);
+ }
+
+ let substs = ty
+ .as_adt()
+ .map(|(_, s)| s.clone())
+ .unwrap_or_else(|| Substitution::empty(Interner));
+ let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
+ for field in fields.iter() {
+ let field_def =
+ variant_data.as_ref().and_then(|it| match it.field(&field.name) {
+ Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+ expr: field.expr,
+ });
+ None
+ }
+ });
+ let field_ty = field_def.map_or(self.err_ty(), |it| {
+ field_types[it.local_id].clone().substitute(Interner, &substs)
+ });
+ self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+ }
+ if let Some(expr) = spread {
+ self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
+ }
+ ty
+ }
+ Expr::Field { expr, name } => {
+ let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
+
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
+ let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
+ let (field_id, parameters) = match derefed_ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ return name.as_tuple_index().and_then(|idx| {
+ substs
+ .as_slice(Interner)
+ .get(idx)
+ .map(|a| a.assert_ty_ref(Interner))
+ .cloned()
+ });
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
+ let local_id = self.db.struct_data(*s).variant_data.field(name)?;
+ let field = FieldId { parent: (*s).into(), local_id };
+ (field, parameters.clone())
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
+ let local_id = self.db.union_data(*u).variant_data.field(name)?;
+ let field = FieldId { parent: (*u).into(), local_id };
+ (field, parameters.clone())
+ }
+ _ => return None,
+ };
+ let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+ .is_visible_from(self.db.upcast(), self.resolver.module());
+ if !is_visible {
+ // Write down the first field resolution even if it is not visible
+ // This aids IDE features for private fields like goto def and in
+ // case of autoderef finding an applicable field, this will be
+ // overwritten in a following cycle
+ if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr)
+ {
+ entry.insert(field_id);
+ }
+ return None;
+ }
+ // can't have `write_field_resolution` here because `self.table` is borrowed :(
+ self.result.field_resolutions.insert(tgt_expr, field_id);
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &parameters);
+ Some(ty)
+ });
+ let ty = match ty {
+ Some(ty) => {
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ self.write_expr_adj(*expr, adjustments);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ ty
+ }
+ _ => self.err_ty(),
+ };
+ ty
+ }
+ Expr::Await { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
+ }
+ Expr::Try { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
+ }
+ Expr::Cast { expr, type_ref } => {
+ // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
+ let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let cast_ty = self.make_ty(type_ref);
+ // FIXME check the cast...
+ cast_ty
+ }
+ Expr::Ref { expr, rawness, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .and_then(|t| t.as_reference_or_ptr())
+ {
+ if exp_mutability == Mutability::Mut && mutability == Mutability::Not {
+ // FIXME: record type error - expected mut reference but found shared ref,
+ // which cannot be coerced
+ }
+ if exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
+ // FIXME: record type error - expected reference but found ptr,
+ // which cannot be coerced
+ }
+ Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
+ } else {
+ Expectation::none()
+ };
+ let inner_ty = self.infer_expr_inner(*expr, &expectation);
+ match rawness {
+ Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
+ Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
+ }
+ .intern(Interner)
+ }
+ &Expr::Box { expr } => self.infer_expr_box(expr, expected),
+ Expr::UnaryOp { expr, op } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let inner_ty = self.resolve_ty_shallow(&inner_ty);
+ match op {
+ UnaryOp::Deref => {
+ autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
+ }
+ UnaryOp::Neg => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
+ | TyKind::InferenceVar(
+ _,
+ TyVariableKind::Integer | TyVariableKind::Float,
+ ) => inner_ty,
+ // Otherwise we resolve via the std::ops::Neg trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
+ }
+ }
+ UnaryOp::Not => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Bool | Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer) => inner_ty,
+ // Otherwise we resolve via the std::ops::Not trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
+ }
+ }
+ }
+ }
+ Expr::BinaryOp { lhs, rhs, op } => match op {
+ Some(BinaryOp::Assignment { op: None }) => {
+ let lhs = *lhs;
+ let is_ordinary = match &self.body[lhs] {
+ Expr::Array(_)
+ | Expr::RecordLit { .. }
+ | Expr::Tuple { .. }
+ | Expr::Underscore => false,
+ Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)),
+ _ => true,
+ };
+
+ // In ordinary (non-destructuring) assignments, the type of
+ // `lhs` must be inferred first so that the ADT fields
+ // instantiations in RHS can be coerced to it. Note that this
+ // cannot happen in destructuring assignments because of how
+ // they are desugared.
+ if is_ordinary {
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+ self.infer_expr_coerce(*rhs, &Expectation::has_type(lhs_ty));
+ } else {
+ let rhs_ty = self.infer_expr(*rhs, &Expectation::none());
+ self.infer_assignee_expr(lhs, &rhs_ty);
+ }
+ self.result.standard_types.unit.clone()
+ }
+ Some(BinaryOp::LogicOp(_)) => {
+ let bool_ty = self.result.standard_types.bool_.clone();
+ self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty.clone()));
+ let lhs_diverges = self.diverges;
+ self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty.clone()));
+ // Depending on the LHS' value, the RHS can never execute.
+ self.diverges = lhs_diverges;
+ bool_ty
+ }
+ Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
+ _ => self.err_ty(),
+ },
+ Expr::Range { lhs, rhs, range_type } => {
+ let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
+ let rhs_expect = lhs_ty
+ .as_ref()
+ .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
+ let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
+ match (range_type, lhs_ty, rhs_ty) {
+ (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
+ Some(adt) => TyBuilder::adt(self.db, adt).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, None, Some(ty)) => {
+ match self.resolve_range_to_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, Some(_), Some(ty)) => {
+ match self.resolve_range_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, _, None) => self.err_ty(),
+ }
+ }
+ Expr::Index { base, index } => {
+ let base_ty = self.infer_expr_inner(*base, &Expectation::none());
+ let index_ty = self.infer_expr(*index, &Expectation::none());
+
+ if let Some(index_trait) = self.resolve_ops_index() {
+ let canonicalized = self.canonicalize(base_ty.clone());
+ let receiver_adjustments = method_resolution::resolve_indexing_op(
+ self.db,
+ self.trait_env.clone(),
+ canonicalized.value,
+ index_trait,
+ );
+ let (self_ty, adj) = receiver_adjustments
+ .map_or((self.err_ty(), Vec::new()), |adj| {
+ adj.apply(&mut self.table, base_ty)
+ });
+ self.write_expr_adj(*base, adj);
+ self.resolve_associated_type_with_params(
+ self_ty,
+ self.resolve_ops_index_output(),
+ &[GenericArgData::Ty(index_ty).intern(Interner)],
+ )
+ } else {
+ self.err_ty()
+ }
+ }
+ Expr::Tuple { exprs, .. } => {
+ let mut tys = match expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .map(|t| t.kind(Interner))
+ {
+ Some(TyKind::Tuple(_, substs)) => substs
+ .iter(Interner)
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()))
+ .take(exprs.len())
+ .collect::<Vec<_>>(),
+ _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
+ };
+
+ for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
+ }
+
+ TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
+ }
+ Expr::Array(array) => {
+ let elem_ty =
+ match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
+ Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
+ _ => self.table.new_type_var(),
+ };
+ let mut coerce = CoerceMany::new(elem_ty.clone());
+
+ let expected = Expectation::has_type(elem_ty.clone());
+ let len = match array {
+ Array::ElementList { elements, .. } => {
+ for &expr in elements.iter() {
+ let cur_elem_ty = self.infer_expr_inner(expr, &expected);
+ coerce.coerce(self, Some(expr), &cur_elem_ty);
+ }
+ consteval::usize_const(Some(elements.len() as u128))
+ }
+ &Array::Repeat { initializer, repeat } => {
+ self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
+ self.infer_expr(
+ repeat,
+ &Expectation::has_type(
+ TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
+ ),
+ );
+
+ if let Some(g_def) = self.owner.as_generic_def_id() {
+ let generics = generics(self.db.upcast(), g_def);
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ || generics,
+ DebruijnIndex::INNERMOST,
+ )
+ } else {
+ consteval::usize_const(None)
+ }
+ }
+ };
+
+ TyKind::Array(coerce.complete(), len).intern(Interner)
+ }
+ Expr::Literal(lit) => match lit {
+ Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ Literal::String(..) => {
+ TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
+ .intern(Interner)
+ }
+ Literal::ByteString(bs) => {
+ let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
+
+ let len = consteval::usize_const(Some(bs.len() as u128));
+
+ let array_type = TyKind::Array(byte_type, len).intern(Interner);
+ TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
+ }
+ Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
+ Literal::Int(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Uint(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Float(_v, ty) => match ty {
+ Some(float_ty) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_float_var(),
+ },
+ },
+ Expr::MacroStmts { tail, statements } => {
+ self.infer_block(tgt_expr, statements, *tail, expected)
+ }
+ Expr::Underscore => {
+ // Underscore expressions may only appear in assignee expressions,
+ // which are handled by `infer_assignee_expr()`, so any underscore
+ // expression reaching this branch is an error.
+ self.err_ty()
+ }
+ };
+ // use a new type variable if we got unknown here
+ let ty = self.insert_type_vars_shallow(ty);
+ self.write_expr_ty(tgt_expr, ty.clone());
+ if self.resolve_ty_shallow(&ty).is_never() {
+ // Any expression that produces a value of type `!` must have diverged
+ self.diverges = Diverges::Always;
+ }
+ ty
+ }
+
+    /// Infers the type of a `box expr` expression.
+    ///
+    /// If `Box` resolves, propagates an expectation of `T` (from an expected
+    /// `Box<T>`) down to the inner expression and builds the resulting
+    /// `Box<inner_ty, ...>` type; otherwise returns the error type.
+    fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
+        // Without a resolvable `Box` there is nothing meaningful to build.
+        let box_id = match self.resolve_boxed_box() {
+            Some(it) => it,
+            None => return self.err_ty(),
+        };
+
+        let table = &mut self.table;
+        // When the expectation is `Box<T>` for this same `Box` adt, pass `T`
+        // (as an rvalue hint) down to the boxed expression.
+        let inner_exp = match table_expectation(expected, table) {
+            Some(ty) => match ty.as_adt() {
+                Some((exp_adt, substs)) if exp_adt == box_id => {
+                    let first_arg = substs.at(Interner, 0);
+                    Expectation::rvalue_hint(table, Ty::clone(first_arg.assert_ty_ref(Interner)))
+                }
+                _ => Expectation::none(),
+            },
+            None => Expectation::none(),
+        };
+
+        fn table_expectation(
+            expected: &Expectation,
+            table: &mut unify::InferenceTable<'_>,
+        ) -> Option<Ty> {
+            expected.to_option(table)
+        }
+
+        let inner_ty = self.infer_expr_inner(inner_expr, &inner_exp);
+        // Build `Box<inner_ty, ...>`; remaining generic params (e.g. the
+        // allocator) are filled through their defaults with fresh vars.
+        TyBuilder::adt(self.db, box_id)
+            .push(inner_ty)
+            .fill_with_defaults(self.db, || self.table.new_type_var())
+            .build()
+    }
+
+    /// Infers the type of an assignee expression — the LHS of a
+    /// (possibly destructuring) assignment such as `(a, b) = rhs`,
+    /// `[x, y] = rhs`, or `Struct { f } = rhs` — checking it against `rhs_ty`
+    /// much like a pattern is checked against a scrutinee type.
+    ///
+    /// Records a type mismatch on `lhs` when the inferred assignee type does
+    /// not unify with `rhs_ty`, writes the resulting type for `lhs`, and
+    /// returns it.
+    pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
+        // Recognizes a bare `..` rest expression (a full exclusive range with
+        // no endpoints), which acts like the rest pattern in destructuring.
+        let is_rest_expr = |expr| {
+            matches!(
+                &self.body[expr],
+                Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive },
+            )
+        };
+
+        let rhs_ty = self.resolve_ty_shallow(rhs_ty);
+
+        let ty = match &self.body[lhs] {
+            Expr::Tuple { exprs, .. } => {
+                // We don't consider multiple ellipses. This is analogous to
+                // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+                let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
+                let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+                self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
+            }
+            Expr::Call { callee, args, .. } => {
+                // Tuple structs
+                let path = match &self.body[*callee] {
+                    Expr::Path(path) => Some(path),
+                    _ => None,
+                };
+
+                // We don't consider multiple ellipses. This is analogous to
+                // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+                let ellipsis = args.iter().position(|e| is_rest_expr(*e));
+                let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+                self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
+            }
+            Expr::Array(Array::ElementList { elements, .. }) => {
+                // Element type comes from the RHS array type when available.
+                let elem_ty = match rhs_ty.kind(Interner) {
+                    TyKind::Array(st, _) => st.clone(),
+                    _ => self.err_ty(),
+                };
+
+                // There's no need to handle `..` as it cannot be bound.
+                let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e));
+
+                for e in sub_exprs {
+                    self.infer_assignee_expr(*e, &elem_ty);
+                }
+
+                match rhs_ty.kind(Interner) {
+                    TyKind::Array(_, _) => rhs_ty.clone(),
+                    // Even when `rhs_ty` is not an array type, this assignee
+                    // expression is inferred to be an array (of unknown element
+                    // type and length). This should not be just an error type,
+                    // because we are to compute the unifiability of this type and
+                    // `rhs_ty` in the end of this function to issue type mismatches.
+                    _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
+                        .intern(Interner),
+                }
+            }
+            Expr::RecordLit { path, fields, .. } => {
+                let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
+
+                self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+            }
+            // `_ = rhs` accepts anything; its type is just the RHS type.
+            Expr::Underscore => rhs_ty.clone(),
+            _ => {
+                // `lhs` is a place expression, a unit struct, or an enum variant.
+                let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+
+                // This is the only branch where this function may coerce any type.
+                // We are returning early to avoid the unifiability check below.
+                let lhs_ty = self.insert_type_vars_shallow(lhs_ty);
+                let ty = match self.coerce(None, &rhs_ty, &lhs_ty) {
+                    Ok(ty) => ty,
+                    Err(_) => {
+                        self.result.type_mismatches.insert(
+                            lhs.into(),
+                            TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() },
+                        );
+                        // `rhs_ty` is returned so no further type mismatches are
+                        // reported because of this mismatch.
+                        rhs_ty
+                    }
+                };
+                self.write_expr_ty(lhs, ty.clone());
+                return ty;
+            }
+        };
+
+        // For the destructuring cases above: require that the assignee type
+        // unifies with the RHS type, reporting a mismatch otherwise.
+        let ty = self.insert_type_vars_shallow(ty);
+        if !self.unify(&ty, &rhs_ty) {
+            self.result
+                .type_mismatches
+                .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() });
+        }
+        self.write_expr_ty(lhs, ty.clone());
+        ty
+    }
+
+    /// Infers the type of `lhs op rhs` for an overloadable binary operator.
+    ///
+    /// If the operator's trait method resolves, records the method resolution
+    /// and derives the result type from the method's signature; otherwise it
+    /// falls back to the built-in operator typing rules. Built-in knowledge is
+    /// additionally unified in on the trait path to help inference.
+    fn infer_overloadable_binop(
+        &mut self,
+        lhs: ExprId,
+        op: BinaryOp,
+        rhs: ExprId,
+        tgt_expr: ExprId,
+    ) -> Ty {
+        let lhs_expectation = Expectation::none();
+        let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
+        // Fresh var for the RHS: it is inferred later, after the method
+        // substitution (and obligations) have had a chance to constrain it.
+        let rhs_ty = self.table.new_type_var();
+
+        let func = self.resolve_binop_method(op);
+        let func = match func {
+            Some(func) => func,
+            None => {
+                // No trait method available: use built-in operator typing only.
+                let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
+                let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
+                return self
+                    .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
+                    .unwrap_or_else(|| self.err_ty());
+            }
+        };
+
+        // Build the method substitution from the operand types.
+        let subst = TyBuilder::subst_for_def(self.db, func)
+            .push(lhs_ty.clone())
+            .push(rhs_ty.clone())
+            .build();
+        self.write_method_resolution(tgt_expr, func, subst.clone());
+
+        let method_ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
+        self.register_obligations_for_call(&method_ty);
+
+        // Now infer the RHS against the (possibly constrained) fresh var.
+        self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()));
+
+        let ret_ty = match method_ty.callable_sig(self.db) {
+            Some(sig) => sig.ret().clone(),
+            None => self.err_ty(),
+        };
+
+        let ret_ty = self.normalize_associated_types_in(ret_ty);
+
+        // FIXME: record autoref adjustments
+
+        // use knowledge of built-in binary ops, which can sometimes help inference
+        if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
+            self.unify(&builtin_rhs, &rhs_ty);
+        }
+        if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
+            self.unify(&builtin_ret, &ret_ty);
+        }
+
+        ret_ty
+    }
+
+    /// Infers the type of a block: all `statements` in order, then the
+    /// optional `tail` expression coerced to `expected`.
+    ///
+    /// Without a tail expression the block's type is `()` (coerced to the
+    /// expectation when one exists), unless the block already diverges, in
+    /// which case a fresh "maybe never" variable is used instead.
+    fn infer_block(
+        &mut self,
+        expr: ExprId,
+        statements: &[Statement],
+        tail: Option<ExprId>,
+        expected: &Expectation,
+    ) -> Ty {
+        for stmt in statements {
+            match stmt {
+                Statement::Let { pat, type_ref, initializer, else_branch } => {
+                    let decl_ty = type_ref
+                        .as_ref()
+                        .map(|tr| self.make_ty(tr))
+                        .unwrap_or_else(|| self.err_ty());
+
+                    // Always use the declared type when specified
+                    let mut ty = decl_ty.clone();
+
+                    if let Some(expr) = initializer {
+                        let actual_ty =
+                            self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
+                        // No (usable) annotation: fall back to the
+                        // initializer's inferred type.
+                        if decl_ty.is_unknown() {
+                            ty = actual_ty;
+                        }
+                    }
+
+                    if let Some(expr) = else_branch {
+                        // A `let ... else` branch must diverge, so expect `!`.
+                        self.infer_expr_coerce(
+                            *expr,
+                            &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
+                        );
+                    }
+
+                    self.infer_pat(*pat, &ty, BindingMode::default());
+                }
+                Statement::Expr { expr, .. } => {
+                    self.infer_expr(*expr, &Expectation::none());
+                }
+            }
+        }
+
+        if let Some(expr) = tail {
+            self.infer_expr_coerce(expr, expected)
+        } else {
+            // Citing rustc: if there is no explicit tail expression,
+            // that is typically equivalent to a tail expression
+            // of `()` -- except if the block diverges. In that
+            // case, there is no value supplied from the tail
+            // expression (assuming there are no other breaks,
+            // this implies that the type of the block will be
+            // `!`).
+            if self.diverges.is_always() {
+                // we don't even make an attempt at coercion
+                self.table.new_maybe_never_var()
+            } else {
+                if let Some(t) = expected.only_has_type(&mut self.table) {
+                    // `()` must coerce to the expectation; report a mismatch
+                    // on the block expression itself when it cannot.
+                    if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+                        self.result.type_mismatches.insert(
+                            expr.into(),
+                            TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+                        );
+                    }
+                    t
+                } else {
+                    TyBuilder::unit()
+                }
+            }
+        }
+    }
+
+    /// Infers a method call `receiver.method_name::<generic_args>(args)`.
+    ///
+    /// Looks the method up (recording receiver adjustments and the method
+    /// resolution on success), unifies the formal receiver type with the
+    /// actual one, checks the remaining arguments, and returns the
+    /// (normalized) return type. An unresolved method yields error types.
+    fn infer_method_call(
+        &mut self,
+        tgt_expr: ExprId,
+        receiver: ExprId,
+        args: &[ExprId],
+        method_name: &Name,
+        generic_args: Option<&GenericArgs>,
+        expected: &Expectation,
+    ) -> Ty {
+        let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+        let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+
+        let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+        let resolved = method_resolution::lookup_method(
+            &canonicalized_receiver.value,
+            self.db,
+            self.trait_env.clone(),
+            &traits_in_scope,
+            VisibleFromModule::Filter(self.resolver.module()),
+            method_name,
+        );
+        let (receiver_ty, method_ty, substs) = match resolved {
+            Some((adjust, func)) => {
+                // Apply autoderef/autoref adjustments to the receiver and
+                // record both the adjustments and the method resolution.
+                let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+                let generics = generics(self.db.upcast(), func.into());
+                let substs = self.substs_for_method_call(generics, generic_args);
+                self.write_expr_adj(receiver, adjustments);
+                self.write_method_resolution(tgt_expr, func, substs.clone());
+                (ty, self.db.value_ty(func.into()), substs)
+            }
+            // Method not found: keep the receiver type and use an error type
+            // for the method so inference can continue.
+            None => (
+                receiver_ty,
+                Binders::empty(Interner, self.err_ty()),
+                Substitution::empty(Interner),
+            ),
+        };
+        let method_ty = method_ty.substitute(Interner, &substs);
+        self.register_obligations_for_call(&method_ty);
+        // Split the signature into receiver, remaining params, and return.
+        let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+            match method_ty.callable_sig(self.db) {
+                Some(sig) => {
+                    if !sig.params().is_empty() {
+                        (
+                            sig.params()[0].clone(),
+                            sig.params()[1..].to_vec(),
+                            sig.ret().clone(),
+                            sig.is_varargs,
+                        )
+                    } else {
+                        (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
+                    }
+                }
+                // No callable signature; `is_varargs = true` suppresses the
+                // argument-count diagnostic in `check_call_arguments`.
+                None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+            };
+        self.unify(&formal_receiver_ty, &receiver_ty);
+
+        let expected_inputs =
+            self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());
+
+        self.check_call_arguments(tgt_expr, args, &expected_inputs, &param_tys, &[], is_varargs);
+        self.normalize_associated_types_in(ret_ty)
+    }
+
+    /// Computes refined expectations for a callable's `inputs` by
+    /// speculatively unifying its `output` with the expected output type.
+    ///
+    /// Returns the resolved input types when the speculative unification
+    /// succeeds, and an empty `Vec` otherwise (or when there is no concrete
+    /// expectation at all).
+    fn expected_inputs_for_expected_output(
+        &mut self,
+        expected_output: &Expectation,
+        output: Ty,
+        inputs: Vec<Ty>,
+    ) -> Vec<Ty> {
+        // Nothing to propagate without a concrete expected type.
+        let expected_ty = match expected_output.to_option(&mut self.table) {
+            Some(ty) => ty,
+            None => return Vec::new(),
+        };
+        // Run the trial unification inside `fudge_inference` so that the
+        // speculative bindings do not leak into the real inference table.
+        self.table.fudge_inference(|table| {
+            if table.try_unify(&expected_ty, &output).is_err() {
+                return Vec::new();
+            }
+            // Resolve the inputs as far as possible; still-unresolved
+            // variables fall back to their variable form by kind.
+            table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
+                chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
+                chalk_ir::VariableKind::Lifetime => var.to_lifetime(Interner).cast(Interner),
+                chalk_ir::VariableKind::Const(ty) => var.to_const(Interner, ty).cast(Interner),
+            })
+        })
+    }
+
+    /// Checks the call's `args` against the callable's `param_tys`.
+    ///
+    /// Emits a `MismatchedArgCount` diagnostic when the counts disagree
+    /// (unless `is_varargs`), then infers each argument and coerces it to
+    /// either the refined `expected_inputs` type (when available) or the
+    /// formal parameter type. Positions listed in `skip_indices` (sorted,
+    /// e.g. legacy const generics) are not checked at all.
+    fn check_call_arguments(
+        &mut self,
+        expr: ExprId,
+        args: &[ExprId],
+        expected_inputs: &[Ty],
+        param_tys: &[Ty],
+        skip_indices: &[u32],
+        is_varargs: bool,
+    ) {
+        if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
+            self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
+                call_expr: expr,
+                expected: param_tys.len() + skip_indices.len(),
+                found: args.len(),
+            });
+        }
+
+        // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+        // We do this in a pretty awful way: first we type-check any arguments
+        // that are not closures, then we type-check the closures. This is so
+        // that we have more information about the types of arguments when we
+        // type-check the functions. This isn't really the right way to do this.
+        for &check_closures in &[false, true] {
+            let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
+            // Pad the formal parameter types with error types so that excess
+            // arguments are still inferred.
+            let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
+            // Fall back to the formal parameter types beyond the refined
+            // expected inputs.
+            let expected_iter = expected_inputs
+                .iter()
+                .cloned()
+                .chain(param_iter.clone().skip(expected_inputs.len()));
+            for (idx, ((&arg, param_ty), expected_ty)) in
+                args.iter().zip(param_iter).zip(expected_iter).enumerate()
+            {
+                let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
+                if is_closure != check_closures {
+                    continue;
+                }
+
+                // Advance past (and then test for) skipped argument indices.
+                while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
+                    skip_indices.next();
+                }
+                if skip_indices.peek().copied() == Some(idx as u32) {
+                    continue;
+                }
+
+                // the difference between param_ty and expected here is that
+                // expected is the parameter when the expected *return* type is
+                // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
+                // the expected type is already `&[i32]`, whereas param_ty is
+                // still an unbound type variable. We don't always want to force
+                // the parameter to coerce to the expected type (for example in
+                // `coerce_unsize_expected_type_4`).
+                let param_ty = self.normalize_associated_types_in(param_ty);
+                let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
+                // infer with the expected type we have...
+                let ty = self.infer_expr_inner(arg, &expected);
+
+                // then coerce to either the expected type or just the formal parameter type
+                let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
+                    // if we are coercing to the expectation, unify with the
+                    // formal parameter type to connect everything
+                    self.unify(&ty, &param_ty);
+                    ty
+                } else {
+                    param_ty
+                };
+                if !coercion_target.is_unknown() {
+                    if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
+                        self.result.type_mismatches.insert(
+                            arg.into(),
+                            TypeMismatch { expected: coercion_target, actual: ty.clone() },
+                        );
+                    }
+                }
+            }
+        }
+    }
+
+    /// Builds the substitution for a method call.
+    ///
+    /// Parent (impl/trait) parameters always become fresh inference
+    /// variables; explicitly written `generic_args` are lowered in order for
+    /// the method's own type/const parameters; any remaining parameters are
+    /// also filled with fresh variables. The result covers `def_generics`
+    /// completely (asserted at the end).
+    fn substs_for_method_call(
+        &mut self,
+        def_generics: Generics,
+        generic_args: Option<&GenericArgs>,
+    ) -> Substitution {
+        let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+            def_generics.provenance_split();
+        assert_eq!(self_params, 0); // method shouldn't have another Self param
+        let total_len = parent_params + type_params + const_params + impl_trait_params;
+        let mut substs = Vec::with_capacity(total_len);
+        // Parent arguments are unknown
+        for (id, param) in def_generics.iter_parent() {
+            match param {
+                TypeOrConstParamData::TypeParamData(_) => {
+                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner));
+                }
+                TypeOrConstParamData::ConstParamData(_) => {
+                    // Const vars need the parameter's type.
+                    let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id));
+                    substs
+                        .push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner));
+                }
+            }
+        }
+        // handle provided arguments
+        if let Some(generic_args) = generic_args {
+            // if args are provided, it should be all of them, but we can't rely on that
+            for (arg, kind_id) in generic_args
+                .args
+                .iter()
+                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+                .take(type_params + const_params)
+                .zip(def_generics.iter_id().skip(parent_params))
+            {
+                if let Some(g) = generic_arg_to_chalk(
+                    self.db,
+                    kind_id,
+                    arg,
+                    self,
+                    |this, type_ref| this.make_ty(type_ref),
+                    |this, c, ty| {
+                        const_or_path_to_chalk(
+                            this.db,
+                            &this.resolver,
+                            ty,
+                            c,
+                            ParamLoweringMode::Placeholder,
+                            || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
+                            DebruijnIndex::INNERMOST,
+                        )
+                    },
+                ) {
+                    substs.push(g);
+                }
+            }
+        };
+        // Fill whatever is left (unprovided args) with fresh variables.
+        for (id, data) in def_generics.iter().skip(substs.len()) {
+            match data {
+                TypeOrConstParamData::TypeParamData(_) => {
+                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner))
+                }
+                TypeOrConstParamData::ConstParamData(_) => {
+                    substs.push(
+                        GenericArgData::Const(self.table.new_const_var(
+                            self.db.const_param_ty(ConstParamId::from_unchecked(id)),
+                        ))
+                        .intern(Interner),
+                    )
+                }
+            }
+        }
+        assert_eq!(substs.len(), total_len);
+        Substitution::from_iter(Interner, substs)
+    }
+
+ fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
+ let callable_ty = self.resolve_ty_shallow(callable_ty);
+ if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
+ let def: CallableDefId = from_chalk(self.db, *fn_def);
+ let generic_predicates = self.db.generic_predicates(def.into());
+ for predicate in generic_predicates.iter() {
+ let (predicate, binders) = predicate
+ .clone()
+ .substitute(Interner, parameters)
+ .into_value_and_skipped_binders();
+ always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
+ self.push_obligation(predicate.cast(Interner));
+ }
+ // add obligation for trait implementation, if this is a trait method
+ match def {
+ CallableDefId::FunctionId(f) => {
+ if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
+ // construct a TraitRef
+ let substs = crate::subst_prefix(
+ &*parameters,
+ generics(self.db.upcast(), trait_.into()).len(),
+ );
+ self.push_obligation(
+ TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
+ .cast(Interner),
+ );
+ }
+ }
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
+ }
+ }
+ }
+
+ /// Returns the argument indices to skip.
+ fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Box<[u32]> {
+ let (func, subst) = match callee.kind(Interner) {
+ TyKind::FnDef(fn_id, subst) => {
+ let callable = CallableDefId::from_chalk(self.db, *fn_id);
+ let func = match callable {
+ CallableDefId::FunctionId(f) => f,
+ _ => return Default::default(),
+ };
+ (func, subst)
+ }
+ _ => return Default::default(),
+ };
+
+ let data = self.db.function_data(func);
+ if data.legacy_const_generics_indices.is_empty() {
+ return Default::default();
+ }
+
+ // only use legacy const generics if the param count matches with them
+ if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
+ if args.len() <= data.params.len() {
+ return Default::default();
+ } else {
+ // there are more parameters than there should be without legacy
+ // const params; use them
+ let mut indices = data.legacy_const_generics_indices.clone();
+ indices.sort();
+ return indices;
+ }
+ }
+
+ // check legacy const parameters
+ for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
+ let arg = match subst.at(Interner, subst_idx).constant(Interner) {
+ Some(c) => c,
+ None => continue, // not a const parameter?
+ };
+ if arg_idx >= args.len() as u32 {
+ continue;
+ }
+ let _ty = arg.data(Interner).ty.clone();
+ let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
+ self.infer_expr(args[arg_idx as usize], &expected);
+ // FIXME: evaluate and unify with the const
+ }
+ let mut indices = data.legacy_const_generics_indices.clone();
+ indices.sort();
+ indices
+ }
+
+ fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
+ let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
+ let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
+ match op {
+ BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
+ Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
+ }
+ BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
+ // all integer combinations are valid here
+ if matches!(
+ lhs_ty.kind(Interner),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer)
+ ) && matches!(
+ rhs_ty.kind(Interner),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer)
+ ) {
+ Some(lhs_ty)
+ } else {
+ None
+ }
+ }
+ BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
+ // (int, int) | (uint, uint) | (float, float)
+ (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
+ | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
+ | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_))) => {
+ Some(rhs_ty)
+ }
+ // ({int}, int) | ({int}, uint)
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+ ) => Some(rhs_ty),
+ // (int, {int}) | (uint, {int})
+ (
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ ) => Some(lhs_ty),
+ // ({float} | float)
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ TyKind::Scalar(Scalar::Float(_)),
+ ) => Some(rhs_ty),
+ // (float, {float})
+ (
+ TyKind::Scalar(Scalar::Float(_)),
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ ) => Some(lhs_ty),
+ // ({int}, {int}) | ({float}, {float})
+ (
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ TyKind::InferenceVar(_, TyVariableKind::Integer),
+ )
+ | (
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ TyKind::InferenceVar(_, TyVariableKind::Float),
+ ) => Some(rhs_ty),
+ _ => None,
+ },
+ }
+ }
+
+ fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
+ Some(match op {
+ BinaryOp::LogicOp(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ BinaryOp::Assignment { op: None } => lhs_ty,
+ BinaryOp::CmpOp(CmpOp::Eq { .. }) => match self
+ .resolve_ty_shallow(&lhs_ty)
+ .kind(Interner)
+ {
+ TyKind::Scalar(_) | TyKind::Str => lhs_ty,
+ TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
+ _ => return None,
+ },
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => return None,
+ BinaryOp::CmpOp(CmpOp::Ord { .. })
+ | BinaryOp::Assignment { op: Some(_) }
+ | BinaryOp::ArithOp(_) => match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) => lhs_ty,
+ TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
+ _ => return None,
+ },
+ })
+ }
+
+ fn resolve_binop_method(&self, op: BinaryOp) -> Option<FunctionId> {
+ let (name, lang_item) = match op {
+ BinaryOp::LogicOp(_) => return None,
+ BinaryOp::ArithOp(aop) => match aop {
+ ArithOp::Add => (name!(add), name!(add)),
+ ArithOp::Mul => (name!(mul), name!(mul)),
+ ArithOp::Sub => (name!(sub), name!(sub)),
+ ArithOp::Div => (name!(div), name!(div)),
+ ArithOp::Rem => (name!(rem), name!(rem)),
+ ArithOp::Shl => (name!(shl), name!(shl)),
+ ArithOp::Shr => (name!(shr), name!(shr)),
+ ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
+ ArithOp::BitOr => (name!(bitor), name!(bitor)),
+ ArithOp::BitAnd => (name!(bitand), name!(bitand)),
+ },
+ BinaryOp::Assignment { op: Some(aop) } => match aop {
+ ArithOp::Add => (name!(add_assign), name!(add_assign)),
+ ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
+ ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
+ ArithOp::Div => (name!(div_assign), name!(div_assign)),
+ ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
+ ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
+ ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
+ ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
+ ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
+ ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
+ },
+ BinaryOp::CmpOp(cop) => match cop {
+ CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
+ CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
+ (name!(le), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
+ (name!(lt), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
+ (name!(ge), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
+ (name!(gt), name!(partial_ord))
+ }
+ },
+ BinaryOp::Assignment { op: None } => return None,
+ };
+
+ let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?;
+
+ self.db.trait_data(trait_).method_by_name(&name)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
new file mode 100644
index 000000000..5e7320a5d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -0,0 +1,354 @@
+//! Type inference for patterns.
+
+use std::iter::repeat_with;
+
+use chalk_ir::Mutability;
+use hir_def::{
+ expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
+ path::Path,
+ type_ref::ConstScalar,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ consteval::intern_const_scalar,
+ infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
+ lower::lower_to_chalk_mutability,
+ static_lifetime, ConcreteConst, ConstValue, Interner, Substitution, Ty, TyBuilder, TyExt,
+ TyKind,
+};
+
+use super::PatLike;
+
+impl<'a> InferenceContext<'a> {
+ /// Infers type for tuple struct pattern or its corresponding assignee expression.
+ ///
+ /// Ellipses found in the original pattern or expression must be filtered out.
+ pub(super) fn infer_tuple_struct_pat_like<T: PatLike>(
+ &mut self,
+ path: Option<&Path>,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ id: T,
+ ellipsis: Option<usize>,
+ subs: &[T],
+ ) -> Ty {
+ let (ty, def) = self.resolve_variant(path, true);
+ let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+ if let Some(variant) = def {
+ self.write_variant_resolution(id.into(), variant);
+ }
+ self.unify(&ty, expected);
+
+ let substs =
+ ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+ let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let (pre, post) = match ellipsis {
+ Some(idx) => subs.split_at(idx),
+ None => (subs, &[][..]),
+ };
+ let post_idx_offset = field_tys.iter().count().saturating_sub(post.len());
+
+ let pre_iter = pre.iter().enumerate();
+ let post_iter = (post_idx_offset..).zip(post.iter());
+ for (i, &subpat) in pre_iter.chain(post_iter) {
+ let expected_ty = var_data
+ .as_ref()
+ .and_then(|d| d.field(&Name::new_tuple_field(i)))
+ .map_or(self.err_ty(), |field| {
+ field_tys[field].clone().substitute(Interner, &substs)
+ });
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
+ T::infer(self, subpat, &expected_ty, default_bm);
+ }
+
+ ty
+ }
+
+ /// Infers type for record pattern or its corresponding assignee expression.
+ pub(super) fn infer_record_pat_like<T: PatLike>(
+ &mut self,
+ path: Option<&Path>,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ id: T,
+ subs: impl Iterator<Item = (Name, T)>,
+ ) -> Ty {
+ let (ty, def) = self.resolve_variant(path, false);
+ if let Some(variant) = def {
+ self.write_variant_resolution(id.into(), variant);
+ }
+
+ self.unify(&ty, expected);
+
+ let substs =
+ ty.as_adt().map(|(_, s)| s.clone()).unwrap_or_else(|| Substitution::empty(Interner));
+
+ let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let var_data = def.map(|it| it.variant_data(self.db.upcast()));
+
+ for (name, inner) in subs {
+ let expected_ty = var_data
+ .as_ref()
+ .and_then(|it| it.field(&name))
+ .map_or(self.err_ty(), |f| field_tys[f].clone().substitute(Interner, &substs));
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
+
+ T::infer(self, inner, &expected_ty, default_bm);
+ }
+
+ ty
+ }
+
+ /// Infers type for tuple pattern or its corresponding assignee expression.
+ ///
+ /// Ellipses found in the original pattern or expression must be filtered out.
+ pub(super) fn infer_tuple_pat_like<T: PatLike>(
+ &mut self,
+ expected: &Ty,
+ default_bm: T::BindingMode,
+ ellipsis: Option<usize>,
+ subs: &[T],
+ ) -> Ty {
+ let expectations = match expected.as_tuple() {
+ Some(parameters) => &*parameters.as_slice(Interner),
+ _ => &[],
+ };
+
+ let ((pre, post), n_uncovered_patterns) = match ellipsis {
+ Some(idx) => (subs.split_at(idx), expectations.len().saturating_sub(subs.len())),
+ None => ((&subs[..], &[][..]), 0),
+ };
+ let mut expectations_iter = expectations
+ .iter()
+ .cloned()
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()));
+
+ let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len());
+
+ inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns + subs.len()));
+
+ // Process pre
+ for (ty, pat) in inner_tys.iter_mut().zip(pre) {
+ *ty = T::infer(self, *pat, ty, default_bm);
+ }
+
+ // Process post
+ for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
+ *ty = T::infer(self, *pat, ty, default_bm);
+ }
+
+ TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+ .intern(Interner)
+ }
+
+ pub(super) fn infer_pat(
+ &mut self,
+ pat: PatId,
+ expected: &Ty,
+ mut default_bm: BindingMode,
+ ) -> Ty {
+ let mut expected = self.resolve_ty_shallow(expected);
+
+ if is_non_ref_pat(&self.body, pat) {
+ let mut pat_adjustments = Vec::new();
+ while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
+ pat_adjustments.push(expected.clone());
+ expected = self.resolve_ty_shallow(inner);
+ default_bm = match default_bm {
+ BindingMode::Move => BindingMode::Ref(mutability),
+ BindingMode::Ref(Mutability::Not) => BindingMode::Ref(Mutability::Not),
+ BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
+ }
+ }
+
+ if !pat_adjustments.is_empty() {
+ pat_adjustments.shrink_to_fit();
+ self.result.pat_adjustments.insert(pat, pat_adjustments);
+ }
+ } else if let Pat::Ref { .. } = &self.body[pat] {
+ cov_mark::hit!(match_ergonomics_ref);
+ // When you encounter a `&pat` pattern, reset to Move.
+ // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
+ default_bm = BindingMode::Move;
+ }
+
+ // Lose mutability.
+ let default_bm = default_bm;
+ let expected = expected;
+
+ let ty = match &self.body[pat] {
+ Pat::Tuple { args, ellipsis } => {
+ self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
+ }
+ Pat::Or(pats) => {
+ if let Some((first_pat, rest)) = pats.split_first() {
+ let ty = self.infer_pat(*first_pat, &expected, default_bm);
+ for pat in rest {
+ self.infer_pat(*pat, &expected, default_bm);
+ }
+ ty
+ } else {
+ self.err_ty()
+ }
+ }
+ Pat::Ref { pat, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = match expected.as_reference() {
+ Some((inner_ty, _lifetime, exp_mut)) => {
+ if mutability != exp_mut {
+ // FIXME: emit type error?
+ }
+ inner_ty.clone()
+ }
+ _ => self.result.standard_types.unknown.clone(),
+ };
+ let subty = self.infer_pat(*pat, &expectation, default_bm);
+ TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+ }
+ Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
+ .infer_tuple_struct_pat_like(
+ p.as_deref(),
+ &expected,
+ default_bm,
+ pat,
+ *ellipsis,
+ subpats,
+ ),
+ Pat::Record { path: p, args: fields, ellipsis: _ } => {
+ let subs = fields.iter().map(|f| (f.name.clone(), f.pat));
+ self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs)
+ }
+ Pat::Path(path) => {
+ // FIXME use correct resolver for the surrounding expression
+ let resolver = self.resolver.clone();
+ self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Pat::Bind { mode, name: _, subpat } => {
+ let mode = if mode == &BindingAnnotation::Unannotated {
+ default_bm
+ } else {
+ BindingMode::convert(*mode)
+ };
+ self.result.pat_binding_modes.insert(pat, mode);
+
+ let inner_ty = match subpat {
+ Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
+ None => expected,
+ };
+ let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+ let bound_ty = match mode {
+ BindingMode::Ref(mutability) => {
+ TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
+ .intern(Interner)
+ }
+ BindingMode::Move => inner_ty.clone(),
+ };
+ self.write_pat_ty(pat, bound_ty);
+ return inner_ty;
+ }
+ Pat::Slice { prefix, slice, suffix } => {
+ let elem_ty = match expected.kind(Interner) {
+ TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ for &pat_id in prefix.iter().chain(suffix.iter()) {
+ self.infer_pat(pat_id, &elem_ty, default_bm);
+ }
+
+ if let &Some(slice_pat_id) = slice {
+ let rest_pat_ty = match expected.kind(Interner) {
+ TyKind::Array(_, length) => {
+ let len = match length.data(Interner).value {
+ ConstValue::Concrete(ConcreteConst {
+ interned: ConstScalar::UInt(len),
+ }) => len.checked_sub((prefix.len() + suffix.len()) as u128),
+ _ => None,
+ };
+ TyKind::Array(
+ elem_ty.clone(),
+ intern_const_scalar(
+ len.map_or(ConstScalar::Unknown, |len| ConstScalar::UInt(len)),
+ TyBuilder::usize(),
+ ),
+ )
+ }
+ _ => TyKind::Slice(elem_ty.clone()),
+ }
+ .intern(Interner);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+ }
+
+ match expected.kind(Interner) {
+ TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+ _ => TyKind::Slice(elem_ty),
+ }
+ .intern(Interner)
+ }
+ Pat::Wild => expected.clone(),
+ Pat::Range { start, end } => {
+ let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
+ self.infer_expr(*end, &Expectation::has_type(start_ty))
+ }
+ Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
+ Pat::Box { inner } => match self.resolve_boxed_box() {
+ Some(box_adt) => {
+ let (inner_ty, alloc_ty) = match expected.as_adt() {
+ Some((adt, subst)) if adt == box_adt => (
+ subst.at(Interner, 0).assert_ty_ref(Interner).clone(),
+ subst.as_slice(Interner).get(1).and_then(|a| a.ty(Interner).cloned()),
+ ),
+ _ => (self.result.standard_types.unknown.clone(), None),
+ };
+
+ let inner_ty = self.infer_pat(*inner, &inner_ty, default_bm);
+ let mut b = TyBuilder::adt(self.db, box_adt).push(inner_ty);
+
+ if let Some(alloc_ty) = alloc_ty {
+ b = b.push(alloc_ty);
+ }
+ b.fill_with_defaults(self.db, || self.table.new_type_var()).build()
+ }
+ None => self.err_ty(),
+ },
+ Pat::ConstBlock(expr) => {
+ self.infer_expr(*expr, &Expectation::has_type(expected.clone()))
+ }
+ Pat::Missing => self.err_ty(),
+ };
+ // use a new type variable if we got error type here
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, &expected) {
+ self.result
+ .type_mismatches
+ .insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
+ }
+ self.write_pat_ty(pat, ty.clone());
+ ty
+ }
+}
+
+fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
+ // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
+ Pat::Path(..) => true,
+ Pat::ConstBlock(..) => true,
+ Pat::Lit(expr) => !matches!(body[*expr], Expr::Literal(Literal::String(..))),
+ Pat::Bind {
+ mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
+ subpat: Some(subpat),
+ ..
+ } => is_non_ref_pat(body, *subpat),
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
new file mode 100644
index 000000000..f580e09e9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -0,0 +1,295 @@
+//! Path expression resolution.
+
+use chalk_ir::cast::Cast;
+use hir_def::{
+ path::{Path, PathSegment},
+ resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
+};
+use hir_expand::name::Name;
+
+use crate::{
+ builder::ParamKind,
+ consteval,
+ method_resolution::{self, VisibleFromModule},
+ GenericArgData, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+ ValueTyDefId,
+};
+
+use super::{ExprOrPatId, InferenceContext, TraitRef};
+
+impl<'a> InferenceContext<'a> {
+ pub(super) fn infer_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let ty = self.resolve_value_path(resolver, path, id)?;
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ Some(ty)
+ }
+
+ fn resolve_value_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ if path.segments().is_empty() {
+ // This can't actually happen syntax-wise
+ return None;
+ }
+ let ty = self.make_ty(type_ref);
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
+ let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
+ self.resolve_ty_assoc_item(
+ ty,
+ path.segments().last().expect("path had at least one segment").name,
+ id,
+ )?
+ } else {
+ let value_or_partial =
+ resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it) => (it, None),
+ ResolveValueResult::Partial(def, remaining_index) => {
+ self.resolve_assoc_item(def, path, remaining_index, id)?
+ }
+ }
+ };
+
+ let typable: ValueTyDefId = match value {
+ ValueNs::LocalBinding(pat) => {
+ let ty = self.result.type_of_pat.get(pat)?.clone();
+ return Some(ty);
+ }
+ ValueNs::FunctionId(it) => it.into(),
+ ValueNs::ConstId(it) => it.into(),
+ ValueNs::StaticId(it) => it.into(),
+ ValueNs::StructId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::EnumVariantId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::ImplSelf(impl_id) => {
+ let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let substs = generics.placeholder_subst(self.db);
+ let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
+ if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
+ let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
+ return Some(ty);
+ } else {
+ // FIXME: diagnostic, invalid Self reference
+ return None;
+ }
+ }
+ ValueNs::GenericParam(it) => return Some(self.db.const_param_ty(it)),
+ };
+
+ let parent_substs = self_subst.unwrap_or_else(|| Substitution::empty(Interner));
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let substs = ctx.substs_from_path(path, typable, true);
+ let mut it = substs.as_slice(Interner)[parent_substs.len(Interner)..].iter().cloned();
+ let ty = TyBuilder::value_ty(self.db, typable)
+ .use_parent_substs(&parent_substs)
+ .fill(|x| {
+ it.next().unwrap_or_else(|| match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
+ })
+ })
+ .build();
+ Some(ty)
+ }
+
+ fn resolve_assoc_item(
+ &mut self,
+ def: TypeNs,
+ path: &Path,
+ remaining_index: usize,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ assert!(remaining_index < path.segments().len());
+ // there may be more intermediate segments between the resolved one and
+ // the end. Only the last segment needs to be resolved to a value; from
+ // the segments before that, we need to get either a type or a trait ref.
+
+ let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
+ let remaining_segments = path.segments().skip(remaining_index);
+ let is_before_last = remaining_segments.len() == 1;
+
+ match (def, is_before_last) {
+ (TypeNs::TraitId(trait_), true) => {
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let trait_ref =
+ ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+ self.resolve_trait_assoc_item(trait_ref, segment, id)
+ }
+ (def, _) => {
+ // Either we already have a type (e.g. `Vec::new`), or we have a
+ // trait but it's not the last segment, so the next segment
+ // should resolve to an associated type of that trait (e.g. `<T
+ // as Iterator>::Item::default`)
+ let remaining_segments_for_ty =
+ remaining_segments.take(remaining_segments.len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let (ty, _) = ctx.lower_partly_resolved_path(
+ def,
+ resolved_segment,
+ remaining_segments_for_ty,
+ true,
+ );
+ if let TyKind::Error = ty.kind(Interner) {
+ return None;
+ }
+
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+
+ self.resolve_ty_assoc_item(ty, segment.name, id)
+ }
+ }
+ }
+
+ fn resolve_trait_assoc_item(
+ &mut self,
+ trait_ref: TraitRef,
+ segment: PathSegment<'_>,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ let trait_ = trait_ref.hir_trait_id();
+ let item =
+ self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
+ match item {
+ AssocItemId::FunctionId(func) => {
+ if segment.name == &self.db.function_data(func).name {
+ Some(AssocItemId::FunctionId(func))
+ } else {
+ None
+ }
+ }
+
+ AssocItemId::ConstId(konst) => {
+ if self
+ .db
+ .const_data(konst)
+ .name
+ .as_ref()
+ .map_or(false, |n| n == segment.name)
+ {
+ Some(AssocItemId::ConstId(konst))
+ } else {
+ None
+ }
+ }
+ AssocItemId::TypeAliasId(_) => None,
+ }
+ })?;
+ let def = match item {
+ AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
+ AssocItemId::ConstId(c) => ValueNs::ConstId(c),
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, Some(trait_ref.substitution)))
+ }
+
+ fn resolve_ty_assoc_item(
+ &mut self,
+ ty: Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ if let TyKind::Error = ty.kind(Interner) {
+ return None;
+ }
+
+ if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
+ return Some(result);
+ }
+
+ let canonical_ty = self.canonicalize(ty.clone());
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ method_resolution::iterate_method_candidates(
+ &canonical_ty.value,
+ self.db,
+ self.table.trait_env.clone(),
+ &traits_in_scope,
+ VisibleFromModule::Filter(self.resolver.module()),
+ Some(name),
+ method_resolution::LookupMode::Path,
+ move |_ty, item| {
+ let (def, container) = match item {
+ AssocItemId::FunctionId(f) => {
+ (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::ConstId(c) => {
+ (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+ let substs = match container {
+ ItemContainerId::ImplId(impl_id) => {
+ let impl_substs = TyBuilder::subst_for_def(self.db, impl_id)
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ let impl_self_ty =
+ self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
+ self.unify(&impl_self_ty, &ty);
+ Some(impl_substs)
+ }
+ ItemContainerId::TraitId(trait_) => {
+ // we're picking this method
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(ty.clone())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.push_obligation(trait_ref.clone().cast(Interner));
+ Some(trait_ref.substitution)
+ }
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, substs))
+ },
+ )
+ }
+
+ fn resolve_enum_variant_on_ty(
+ &mut self,
+ ty: &Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substitution>)> {
+ let ty = self.resolve_ty_shallow(ty);
+ let (enum_id, subst) = match ty.as_adt() {
+ Some((AdtId::EnumId(e), subst)) => (e, subst),
+ _ => return None,
+ };
+ let enum_data = self.db.enum_data(enum_id);
+ let local_id = enum_data.variant(name)?;
+ let variant = EnumVariantId { parent: enum_id, local_id };
+ self.write_variant_resolution(id, variant.into());
+ Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
new file mode 100644
index 000000000..e77b55670
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -0,0 +1,738 @@
+//! Unification and canonicalization logic.
+
+use std::{fmt, mem, sync::Arc};
+
+use chalk_ir::{
+ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy,
+ IntTy, NoSolution, TyVariableKind, UniverseIndex,
+};
+use chalk_solve::infer::ParameterEnaVariableExt;
+use ena::unify::UnifyKey;
+use hir_expand::name;
+use stdx::never;
+
+use super::{InferOk, InferResult, InferenceContext, TypeError};
+use crate::{
+ db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar,
+ Canonical, Const, DebruijnIndex, GenericArg, GenericArgData, Goal, Guidance, InEnvironment,
+ InferenceVar, Interner, Lifetime, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, Solution,
+ Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ &mut self,
+ t: T,
+ ) -> Canonicalized<T>
+ where
+ T: HasInterner<Interner = Interner>,
+ {
+ self.table.canonicalize(t)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct Canonicalized<T>
+where
+ T: HasInterner<Interner = Interner>,
+{
+ pub(crate) value: Canonical<T>,
+ free_vars: Vec<GenericArg>,
+}
+
+impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
+ pub(super) fn apply_solution(
+ &self,
+ ctx: &mut InferenceTable<'_>,
+ solution: Canonical<Substitution>,
+ ) {
+ // the solution may contain new variables, which we need to convert to new inference vars
+ let new_vars = Substitution::from_iter(
+ Interner,
+ solution.binders.iter(Interner).map(|k| match &k.kind {
+ VariableKind::Ty(TyVariableKind::General) => ctx.new_type_var().cast(Interner),
+ VariableKind::Ty(TyVariableKind::Integer) => ctx.new_integer_var().cast(Interner),
+ VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
+ // Chalk can sometimes return new lifetime variables. We just use the static lifetime everywhere
+ VariableKind::Lifetime => static_lifetime().cast(Interner),
+ VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
+ }),
+ );
+ for (i, v) in solution.value.iter(Interner).enumerate() {
+ let var = self.free_vars[i].clone();
+ if let Some(ty) = v.ty(Interner) {
+ // eagerly replace projections in the type; we may be getting types
+ // e.g. from where clauses where this hasn't happened yet
+ let ty = ctx.normalize_associated_types_in(new_vars.apply(ty.clone(), Interner));
+ ctx.unify(var.assert_ty_ref(Interner), &ty);
+ } else {
+ let _ = ctx.try_unify(&var, &new_vars.apply(v.clone(), Interner));
+ }
+ }
+ }
+}
+
+pub fn could_unify(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> bool {
+ unify(db, env, tys).is_some()
+}
+
+pub(crate) fn unify(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ tys: &Canonical<(Ty, Ty)>,
+) -> Option<Substitution> {
+ let mut table = InferenceTable::new(db, env);
+ let vars = Substitution::from_iter(
+ Interner,
+ tys.binders.iter(Interner).map(|x| match &x.kind {
+ chalk_ir::VariableKind::Ty(_) => {
+ GenericArgData::Ty(table.new_type_var()).intern(Interner)
+ }
+ chalk_ir::VariableKind::Lifetime => {
+ GenericArgData::Ty(table.new_type_var()).intern(Interner)
+ } // FIXME: maybe wrong?
+ chalk_ir::VariableKind::Const(ty) => {
+ GenericArgData::Const(table.new_const_var(ty.clone())).intern(Interner)
+ }
+ }),
+ );
+ let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner);
+ let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
+ if !table.unify(&ty1_with_vars, &ty2_with_vars) {
+ return None;
+ }
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ let find_var = |iv| {
+ vars.iter(Interner).position(|v| match v.interned() {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ } == Some(iv))
+ };
+ let fallback = |iv, kind, default, binder| match kind {
+ chalk_ir::VariableKind::Ty(_ty_kind) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_ty(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Lifetime => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_lifetime(Interner).cast(Interner)),
+ chalk_ir::VariableKind::Const(ty) => find_var(iv)
+ .map_or(default, |i| BoundVar::new(binder, i).to_const(Interner, ty).cast(Interner)),
+ };
+ Some(Substitution::from_iter(
+ Interner,
+ vars.iter(Interner).map(|v| table.resolve_with_fallback(v.clone(), &fallback)),
+ ))
+}
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct TypeVariableData {
+ diverging: bool,
+}
+
+type ChalkInferenceTable = chalk_solve::infer::InferenceTable<Interner>;
+
+#[derive(Clone)]
+pub(crate) struct InferenceTable<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) trait_env: Arc<TraitEnvironment>,
+ var_unification_table: ChalkInferenceTable,
+ type_variable_table: Vec<TypeVariableData>,
+ pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+}
+
+pub(crate) struct InferenceTableSnapshot {
+ var_table_snapshot: chalk_solve::infer::InferenceSnapshot<Interner>,
+ pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
+ type_variable_table_snapshot: Vec<TypeVariableData>,
+}
+
+impl<'a> InferenceTable<'a> {
+ pub(crate) fn new(db: &'a dyn HirDatabase, trait_env: Arc<TraitEnvironment>) -> Self {
+ InferenceTable {
+ db,
+ trait_env,
+ var_unification_table: ChalkInferenceTable::new(),
+ type_variable_table: Vec::new(),
+ pending_obligations: Vec::new(),
+ }
+ }
+
+ /// Chalk doesn't know about the `diverging` flag, so when it unifies two
+ /// type variables of which one is diverging, the chosen root might not be
+ /// diverging and we have no way of marking it as such at that time. This
+ /// function goes through all type variables and make sure their root is
+ /// marked as diverging if necessary, so that resolving them gives the right
+ /// result.
+ pub(super) fn propagate_diverging_flag(&mut self) {
+ for i in 0..self.type_variable_table.len() {
+ if !self.type_variable_table[i].diverging {
+ continue;
+ }
+ let v = InferenceVar::from(i as u32);
+ let root = self.var_unification_table.inference_var_root(v);
+ if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) {
+ data.diverging = true;
+ }
+ }
+ }
+
+ pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) {
+ self.type_variable_table[iv.index() as usize].diverging = diverging;
+ }
+
+ fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
+ match kind {
+ _ if self
+ .type_variable_table
+ .get(iv.index() as usize)
+ .map_or(false, |data| data.diverging) =>
+ {
+ TyKind::Never
+ }
+ TyVariableKind::General => TyKind::Error,
+ TyVariableKind::Integer => TyKind::Scalar(Scalar::Int(IntTy::I32)),
+ TyVariableKind::Float => TyKind::Scalar(Scalar::Float(FloatTy::F64)),
+ }
+ .intern(Interner)
+ }
+
+ pub(crate) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ &mut self,
+ t: T,
+ ) -> Canonicalized<T>
+ where
+ T: HasInterner<Interner = Interner>,
+ {
+ // try to resolve obligations before canonicalizing, since this might
+ // result in new knowledge about variables
+ self.resolve_obligations_as_possible();
+ let result = self.var_unification_table.canonicalize(Interner, t);
+ let free_vars = result
+ .free_vars
+ .into_iter()
+ .map(|free_var| free_var.to_generic_arg(Interner))
+ .collect();
+ Canonicalized { value: result.quantified, free_vars }
+ }
+
+ /// Recurses through the given type, normalizing associated types mentioned
+ /// in it by replacing them by type variables and registering obligations to
+ /// resolve later. This should be done once for every type we get from some
+ /// type annotation (e.g. from a let type annotation, field type or function
+ /// call). `make_ty` handles this already, but e.g. for field types we need
+ /// to do it as well.
+ pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+ fold_tys(
+ ty,
+ |ty, _| match ty.kind(Interner) {
+ TyKind::Alias(AliasTy::Projection(proj_ty)) => {
+ self.normalize_projection_ty(proj_ty.clone())
+ }
+ _ => ty,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+
+ pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
+ let var = self.new_type_var();
+ let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() };
+ let obligation = alias_eq.cast(Interner);
+ self.register_obligation(obligation);
+ var
+ }
+
+ fn extend_type_variable_table(&mut self, to_index: usize) {
+ self.type_variable_table.extend(
+ (0..1 + to_index - self.type_variable_table.len())
+ .map(|_| TypeVariableData { diverging: false }),
+ );
+ }
+
+ fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ // Chalk might have created some type variables for its own purposes that we don't know about...
+ self.extend_type_variable_table(var.index() as usize);
+ assert_eq!(var.index() as usize, self.type_variable_table.len() - 1);
+ self.type_variable_table[var.index() as usize].diverging = diverging;
+ var.to_ty_with_kind(Interner, kind)
+ }
+
+ pub(crate) fn new_type_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::General, false)
+ }
+
+ pub(crate) fn new_integer_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::Integer, false)
+ }
+
+ pub(crate) fn new_float_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::Float, false)
+ }
+
+ pub(crate) fn new_maybe_never_var(&mut self) -> Ty {
+ self.new_var(TyVariableKind::General, true)
+ }
+
+ pub(crate) fn new_const_var(&mut self, ty: Ty) -> Const {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ var.to_const(Interner, ty)
+ }
+
+ pub(crate) fn new_lifetime_var(&mut self) -> Lifetime {
+ let var = self.var_unification_table.new_variable(UniverseIndex::ROOT);
+ var.to_lifetime(Interner)
+ }
+
+ pub(crate) fn resolve_with_fallback<T>(
+ &mut self,
+ t: T,
+ fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+ ) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback)
+ }
+
+ pub(crate) fn fresh_subst(&mut self, binders: &[CanonicalVarKind<Interner>]) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ binders.iter().map(|kind| {
+ let param_infer_var =
+ kind.map_ref(|&ui| self.var_unification_table.new_variable(ui));
+ param_infer_var.to_generic_arg(Interner)
+ }),
+ )
+ }
+
+ pub(crate) fn instantiate_canonical<T>(&mut self, canonical: Canonical<T>) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + std::fmt::Debug,
+ {
+ let subst = self.fresh_subst(canonical.binders.as_slice(Interner));
+ subst.apply(canonical.value, Interner)
+ }
+
+ fn resolve_with_fallback_inner<T>(
+ &mut self,
+ var_stack: &mut Vec<InferenceVar>,
+ t: T,
+ fallback: &dyn Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
+ ) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ t.fold_with(
+ &mut resolve::Resolver { table: self, var_stack, fallback },
+ DebruijnIndex::INNERMOST,
+ )
+ .expect("fold failed unexpectedly")
+ }
+
+ pub(crate) fn resolve_completely<T>(&mut self, t: T) -> T
+ where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner>,
+ {
+ self.resolve_with_fallback(t, &|_, _, d, _| d)
+ }
+
+ /// Unify two types and register new trait goals that arise from that.
+ pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+ let result = match self.try_unify(ty1, ty2) {
+ Ok(r) => r,
+ Err(_) => return false,
+ };
+ self.register_infer_ok(result);
+ true
+ }
+
+ /// Unify two types and return new trait goals arising from it, so the
+ /// caller needs to deal with them.
+ pub(crate) fn try_unify<T: Zip<Interner>>(&mut self, t1: &T, t2: &T) -> InferResult<()> {
+ match self.var_unification_table.relate(
+ Interner,
+ &self.db,
+ &self.trait_env.env,
+ chalk_ir::Variance::Invariant,
+ t1,
+ t2,
+ ) {
+ Ok(result) => Ok(InferOk { goals: result.goals, value: () }),
+ Err(chalk_ir::NoSolution) => Err(TypeError),
+ }
+ }
+
+ /// If `ty` is a type variable with known type, returns that type;
+ /// otherwise, return ty.
+ pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty {
+ self.resolve_obligations_as_possible();
+ self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone())
+ }
+
+ pub(crate) fn snapshot(&mut self) -> InferenceTableSnapshot {
+ let var_table_snapshot = self.var_unification_table.snapshot();
+ let type_variable_table_snapshot = self.type_variable_table.clone();
+ let pending_obligations = self.pending_obligations.clone();
+ InferenceTableSnapshot {
+ var_table_snapshot,
+ pending_obligations,
+ type_variable_table_snapshot,
+ }
+ }
+
+ pub(crate) fn rollback_to(&mut self, snapshot: InferenceTableSnapshot) {
+ self.var_unification_table.rollback_to(snapshot.var_table_snapshot);
+ self.type_variable_table = snapshot.type_variable_table_snapshot;
+ self.pending_obligations = snapshot.pending_obligations;
+ }
+
+ pub(crate) fn run_in_snapshot<T>(&mut self, f: impl FnOnce(&mut InferenceTable<'_>) -> T) -> T {
+ let snapshot = self.snapshot();
+ let result = f(self);
+ self.rollback_to(snapshot);
+ result
+ }
+
+ /// Checks an obligation without registering it. Useful mostly to check
+ /// whether a trait *might* be implemented before deciding to 'lock in' the
+ /// choice (during e.g. method resolution or deref).
+ pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option<Solution> {
+ let in_env = InEnvironment::new(&self.trait_env.env, goal);
+ let canonicalized = self.canonicalize(in_env);
+ let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value);
+ solution
+ }
+
+ pub(crate) fn register_obligation(&mut self, goal: Goal) {
+ let in_env = InEnvironment::new(&self.trait_env.env, goal);
+ self.register_obligation_in_env(in_env)
+ }
+
+ fn register_obligation_in_env(&mut self, goal: InEnvironment<Goal>) {
+ let canonicalized = self.canonicalize(goal);
+ if !self.try_resolve_obligation(&canonicalized) {
+ self.pending_obligations.push(canonicalized);
+ }
+ }
+
+ pub(crate) fn register_infer_ok<T>(&mut self, infer_ok: InferOk<T>) {
+ infer_ok.goals.into_iter().for_each(|goal| self.register_obligation_in_env(goal));
+ }
+
+ pub(crate) fn resolve_obligations_as_possible(&mut self) {
+ let _span = profile::span("resolve_obligations_as_possible");
+ let mut changed = true;
+ let mut obligations = Vec::new();
+ while changed {
+ changed = false;
+ mem::swap(&mut self.pending_obligations, &mut obligations);
+ for canonicalized in obligations.drain(..) {
+ if !self.check_changed(&canonicalized) {
+ self.pending_obligations.push(canonicalized);
+ continue;
+ }
+ changed = true;
+ let uncanonical = chalk_ir::Substitute::apply(
+ &canonicalized.free_vars,
+ canonicalized.value.value,
+ Interner,
+ );
+ self.register_obligation_in_env(uncanonical);
+ }
+ }
+ }
+
+ pub(crate) fn fudge_inference<T: TypeFoldable<Interner>>(
+ &mut self,
+ f: impl FnOnce(&mut Self) -> T,
+ ) -> T {
+ use chalk_ir::fold::TypeFolder;
+ struct VarFudger<'a, 'b> {
+ table: &'a mut InferenceTable<'b>,
+ highest_known_var: InferenceVar,
+ }
+ impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ var: chalk_ir::InferenceVar,
+ kind: TyVariableKind,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Ty<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_ty(Interner, kind)
+ } else {
+ self.table.new_type_var()
+ })
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ var: chalk_ir::InferenceVar,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Lifetime<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_lifetime(Interner)
+ } else {
+ self.table.new_lifetime_var()
+ })
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: chalk_ir::Ty<Interner>,
+ var: chalk_ir::InferenceVar,
+ _outer_binder: chalk_ir::DebruijnIndex,
+ ) -> chalk_ir::Fallible<chalk_ir::Const<Interner>> {
+ Ok(if var < self.highest_known_var {
+ var.to_const(Interner, ty)
+ } else {
+ self.table.new_const_var(ty)
+ })
+ }
+ }
+
+ let snapshot = self.snapshot();
+ let highest_known_var = self.new_type_var().inference_var(Interner).expect("inference_var");
+ let result = f(self);
+ self.rollback_to(snapshot);
+ result
+ .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST)
+ .expect("fold_with with VarFudger")
+ }
+
+ /// This checks whether any of the free variables in the `canonicalized`
+ /// have changed (either been unified with another variable, or with a
+ /// value). If this is not the case, we don't need to try to solve the goal
+ /// again -- it'll give the same result as last time.
+ fn check_changed(&mut self, canonicalized: &Canonicalized<InEnvironment<Goal>>) -> bool {
+ canonicalized.free_vars.iter().any(|var| {
+ let iv = match var.data(Interner) {
+ chalk_ir::GenericArgData::Ty(ty) => ty.inference_var(Interner),
+ chalk_ir::GenericArgData::Lifetime(lt) => lt.inference_var(Interner),
+ chalk_ir::GenericArgData::Const(c) => c.inference_var(Interner),
+ }
+ .expect("free var is not inference var");
+ if self.var_unification_table.probe_var(iv).is_some() {
+ return true;
+ }
+ let root = self.var_unification_table.inference_var_root(iv);
+ iv != root
+ })
+ }
+
+ fn try_resolve_obligation(
+ &mut self,
+ canonicalized: &Canonicalized<InEnvironment<Goal>>,
+ ) -> bool {
+ let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value.clone());
+
+ match solution {
+ Some(Solution::Unique(canonical_subst)) => {
+ canonicalized.apply_solution(
+ self,
+ Canonical {
+ binders: canonical_subst.binders,
+ // FIXME: handle constraints
+ value: canonical_subst.value.subst,
+ },
+ );
+ true
+ }
+ Some(Solution::Ambig(Guidance::Definite(substs))) => {
+ canonicalized.apply_solution(self, substs);
+ false
+ }
+ Some(_) => {
+ // FIXME use this when trying to resolve everything at the end
+ false
+ }
+ None => {
+ // FIXME obligation cannot be fulfilled => diagnostic
+ true
+ }
+ }
+ }
+
+ pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+ match ty.callable_sig(self.db) {
+ Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
+ None => self.callable_sig_from_fn_trait(ty, num_args),
+ }
+ }
+
+ fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+ let krate = self.trait_env.krate;
+ let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
+ let output_assoc_type =
+ self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+
+ let mut arg_tys = vec![];
+ let arg_ty = TyBuilder::tuple(num_args)
+ .fill(|x| {
+ let arg = match x {
+ ParamKind::Type => self.new_type_var(),
+ ParamKind::Const(ty) => {
+ never!("Tuple with const parameter");
+ return GenericArgData::Const(self.new_const_var(ty.clone()))
+ .intern(Interner);
+ }
+ };
+ arg_tys.push(arg.clone());
+ GenericArgData::Ty(arg).intern(Interner)
+ })
+ .build();
+
+ let projection = {
+ let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type);
+ if b.remaining() != 2 {
+ return None;
+ }
+ b.push(ty.clone()).push(arg_ty).build()
+ };
+
+ let trait_env = self.trait_env.env.clone();
+ let obligation = InEnvironment {
+ goal: projection.trait_ref(self.db).cast(Interner),
+ environment: trait_env,
+ };
+ let canonical = self.canonicalize(obligation.clone());
+ if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() {
+ self.register_obligation(obligation.goal);
+ let return_ty = self.normalize_projection_ty(projection);
+ Some((arg_tys, return_ty))
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a> fmt::Debug for InferenceTable<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
+ }
+}
+
+mod resolve {
+ use super::InferenceTable;
+ use crate::{
+ ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
+ Interner, Lifetime, Ty, TyVariableKind, VariableKind,
+ };
+ use chalk_ir::{
+ cast::Cast,
+ fold::{TypeFoldable, TypeFolder},
+ Fallible, NoSolution,
+ };
+ use hir_def::type_ref::ConstScalar;
+
+ pub(super) struct Resolver<'a, 'b, F> {
+ pub(super) table: &'a mut InferenceTable<'b>,
+ pub(super) var_stack: &'a mut Vec<InferenceVar>,
+ pub(super) fallback: F,
+ }
+ impl<'a, 'b, 'i, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+ where
+ F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg + 'i,
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ var: InferenceVar,
+ kind: TyVariableKind,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ let var = self.table.var_unification_table.inference_var_root(var);
+ if self.var_stack.contains(&var) {
+ // recursive type
+ let default = self.table.fallback_value(var, kind).cast(Interner);
+ return Ok((self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+ .assert_ty_ref(Interner)
+ .clone());
+ }
+ let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+ // known_ty may contain other variables that are known by now
+ self.var_stack.push(var);
+ let result =
+ known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+ self.var_stack.pop();
+ result.assert_ty_ref(Interner).clone()
+ } else {
+ let default = self.table.fallback_value(var, kind).cast(Interner);
+ (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
+ .assert_ty_ref(Interner)
+ .clone()
+ };
+ Ok(result)
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: Ty,
+ var: InferenceVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ let var = self.table.var_unification_table.inference_var_root(var);
+ let default = ConstData {
+ ty: ty.clone(),
+ value: ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Unknown }),
+ }
+ .intern(Interner)
+ .cast(Interner);
+ if self.var_stack.contains(&var) {
+ // recursive
+ return Ok((self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+ .assert_const_ref(Interner)
+ .clone());
+ }
+ let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+ // known_ty may contain other variables that are known by now
+ self.var_stack.push(var);
+ let result =
+ known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly");
+ self.var_stack.pop();
+ result.assert_const_ref(Interner).clone()
+ } else {
+ (self.fallback)(var, VariableKind::Const(ty), default, outer_binder)
+ .assert_const_ref(Interner)
+ .clone()
+ };
+ Ok(result)
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ // fall back all lifetimes to 'static -- currently we don't deal
+ // with any lifetimes, but we can sometimes get some lifetime
+ // variables through Chalk's unification, and this at least makes
+ // sure we don't leak them outside of inference
+ Ok(crate::static_lifetime())
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
new file mode 100644
index 000000000..ca76e08fd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -0,0 +1,432 @@
+//! Implementation of the Chalk `Interner` trait, which allows customizing the
+//! representation of the various objects Chalk deals with (types, goals etc.).
+
+use crate::{chalk_db, tls, GenericArg};
+use base_db::salsa::InternId;
+use chalk_ir::{Goal, GoalData};
+use hir_def::{
+ intern::{impl_internable, InternStorage, Internable, Interned},
+ type_ref::ConstScalar,
+ TypeAliasId,
+};
+use smallvec::SmallVec;
+use std::{fmt, sync::Arc};
+
+#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
+pub struct Interner;
+
+#[derive(PartialEq, Eq, Hash)]
+pub struct InternedWrapper<T>(T);
+
+impl<T: fmt::Debug> fmt::Debug for InternedWrapper<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+}
+
+impl<T> std::ops::Deref for InternedWrapper<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl_internable!(
+ InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>,
+ InternedWrapper<SmallVec<[GenericArg; 2]>>,
+ InternedWrapper<chalk_ir::TyData<Interner>>,
+ InternedWrapper<chalk_ir::LifetimeData<Interner>>,
+ InternedWrapper<chalk_ir::ConstData<Interner>>,
+ InternedWrapper<ConstScalar>,
+ InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::ProgramClause<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Interner>>>,
+ InternedWrapper<Vec<chalk_ir::Variance>>,
+);
+
+impl chalk_ir::interner::Interner for Interner {
+ type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Interner>>>;
+ type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>;
+ type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>;
+ type InternedConcreteConst = ConstScalar;
+ type InternedGenericArg = chalk_ir::GenericArgData<Self>;
+ type InternedGoal = Arc<GoalData<Self>>;
+ type InternedGoals = Vec<Goal<Self>>;
+ type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
+ type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
+ type InternedProgramClauses = Interned<InternedWrapper<Vec<chalk_ir::ProgramClause<Self>>>>;
+ type InternedQuantifiedWhereClauses =
+ Interned<InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Self>>>>;
+ type InternedVariableKinds = Interned<InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>>;
+ type InternedCanonicalVarKinds =
+ Interned<InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Self>>>>;
+ type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
+ type InternedVariances = Interned<InternedWrapper<Vec<chalk_ir::Variance>>>;
+ type DefId = InternId;
+ type InternedAdtId = hir_def::AdtId;
+ type Identifier = TypeAliasId;
+ type FnAbi = ();
+
+ fn debug_adt_id(
+ type_kind_id: chalk_db::AdtId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
+ }
+
+ fn debug_trait_id(
+ type_kind_id: chalk_db::TraitId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
+ }
+
+ fn debug_assoc_type_id(
+ id: chalk_db::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
+ }
+
+ fn debug_alias(
+ alias: &chalk_ir::AliasTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ use std::fmt::Debug;
+ match alias {
+ chalk_ir::AliasTy::Projection(projection_ty) => {
+ Interner::debug_projection_ty(projection_ty, fmt)
+ }
+ chalk_ir::AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)),
+ }
+ }
+
+ fn debug_projection_ty(
+ proj: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
+ }
+
+ fn debug_opaque_ty(
+ opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id))
+ }
+
+ fn debug_opaque_ty_id(
+ opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
+ }
+
+ fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", ty.data(Interner)))
+ }
+
+ fn debug_lifetime(
+ lifetime: &chalk_ir::Lifetime<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", lifetime.data(Interner)))
+ }
+
+ fn debug_generic_arg(
+ parameter: &GenericArg,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", parameter.data(Interner).inner_debug()))
+ }
+
+ fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ let goal_data = goal.data(Interner);
+ Some(write!(fmt, "{:?}", goal_data))
+ }
+
+ fn debug_goals(
+ goals: &chalk_ir::Goals<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", goals.debug(Interner)))
+ }
+
+ fn debug_program_clause_implication(
+ pci: &chalk_ir::ProgramClauseImplication<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", pci.debug(Interner)))
+ }
+
+ fn debug_substitution(
+ substitution: &chalk_ir::Substitution<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", substitution.debug(Interner)))
+ }
+
+ fn debug_separator_trait_ref(
+ separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner)))
+ }
+
+ fn debug_fn_def_id(
+ fn_def_id: chalk_ir::FnDefId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
+ }
+ fn debug_const(
+ constant: &chalk_ir::Const<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", constant.data(Interner)))
+ }
+ fn debug_variable_kinds(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner)))
+ }
+ fn debug_variable_kinds_with_angles(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner)))
+ }
+ fn debug_canonical_var_kinds(
+ canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner)))
+ }
+ fn debug_program_clause(
+ clause: &chalk_ir::ProgramClause<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clause.data(Interner)))
+ }
+ fn debug_program_clauses(
+ clauses: &chalk_ir::ProgramClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
+ }
+ fn debug_quantified_where_clauses(
+ clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
+ }
+
+ fn intern_ty(self, kind: chalk_ir::TyKind<Self>) -> Self::InternedType {
+ let flags = kind.compute_flags(self);
+ Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags }))
+ }
+
+ fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData<Self> {
+ &ty.0
+ }
+
+ fn intern_lifetime(self, lifetime: chalk_ir::LifetimeData<Self>) -> Self::InternedLifetime {
+ Interned::new(InternedWrapper(lifetime))
+ }
+
+ fn lifetime_data<'a>(
+ self,
+ lifetime: &'a Self::InternedLifetime,
+ ) -> &'a chalk_ir::LifetimeData<Self> {
+ &lifetime.0
+ }
+
+ fn intern_const(self, constant: chalk_ir::ConstData<Self>) -> Self::InternedConst {
+ Interned::new(InternedWrapper(constant))
+ }
+
+ fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData<Self> {
+ &constant.0
+ }
+
+ fn const_eq(
+ self,
+ _ty: &Self::InternedType,
+ c1: &Self::InternedConcreteConst,
+ c2: &Self::InternedConcreteConst,
+ ) -> bool {
+ (c1 == &ConstScalar::Unknown) || (c2 == &ConstScalar::Unknown) || (c1 == c2)
+ }
+
+ fn intern_generic_arg(
+ self,
+ parameter: chalk_ir::GenericArgData<Self>,
+ ) -> Self::InternedGenericArg {
+ parameter
+ }
+
+ fn generic_arg_data<'a>(
+ self,
+ parameter: &'a Self::InternedGenericArg,
+ ) -> &'a chalk_ir::GenericArgData<Self> {
+ parameter
+ }
+
+ fn intern_goal(self, goal: GoalData<Self>) -> Self::InternedGoal {
+ Arc::new(goal)
+ }
+
+ fn intern_goals<E>(
+ self,
+ data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
+ ) -> Result<Self::InternedGoals, E> {
+ data.into_iter().collect()
+ }
+
+ fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData<Self> {
+ goal
+ }
+
+ fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal<Interner>] {
+ goals
+ }
+
+ fn intern_substitution<E>(
+ self,
+ data: impl IntoIterator<Item = Result<GenericArg, E>>,
+ ) -> Result<Self::InternedSubstitution, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn substitution_data<'a>(
+ self,
+ substitution: &'a Self::InternedSubstitution,
+ ) -> &'a [GenericArg] {
+ &substitution.as_ref().0
+ }
+
+ fn intern_program_clause(
+ self,
+ data: chalk_ir::ProgramClauseData<Self>,
+ ) -> Self::InternedProgramClause {
+ data
+ }
+
+ fn program_clause_data<'a>(
+ self,
+ clause: &'a Self::InternedProgramClause,
+ ) -> &'a chalk_ir::ProgramClauseData<Self> {
+ clause
+ }
+
+ fn intern_program_clauses<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>,
+ ) -> Result<Self::InternedProgramClauses, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn program_clauses_data<'a>(
+ self,
+ clauses: &'a Self::InternedProgramClauses,
+ ) -> &'a [chalk_ir::ProgramClause<Self>] {
+ clauses
+ }
+
+ fn intern_quantified_where_clauses<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>,
+ ) -> Result<Self::InternedQuantifiedWhereClauses, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn quantified_where_clauses_data<'a>(
+ self,
+ clauses: &'a Self::InternedQuantifiedWhereClauses,
+ ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
+ clauses
+ }
+
+ fn intern_generic_arg_kinds<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>,
+ ) -> Result<Self::InternedVariableKinds, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn variable_kinds_data<'a>(
+ self,
+ parameter_kinds: &'a Self::InternedVariableKinds,
+ ) -> &'a [chalk_ir::VariableKind<Self>] {
+ &parameter_kinds.as_ref().0
+ }
+
+ fn intern_canonical_var_kinds<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>,
+ ) -> Result<Self::InternedCanonicalVarKinds, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn canonical_var_kinds_data<'a>(
+ self,
+ canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
+ ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
+ canonical_var_kinds
+ }
+
+ fn intern_constraints<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
+ ) -> Result<Self::InternedConstraints, E> {
+ data.into_iter().collect()
+ }
+
+ fn constraints_data<'a>(
+ self,
+ constraints: &'a Self::InternedConstraints,
+ ) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
+ constraints
+ }
+ fn debug_closure_id(
+ _fn_def_id: chalk_ir::ClosureId<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+ fn debug_constraints(
+ _clauses: &chalk_ir::Constraints<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+
+ fn intern_variances<E>(
+ self,
+ data: impl IntoIterator<Item = Result<chalk_ir::Variance, E>>,
+ ) -> Result<Self::InternedVariances, E> {
+ Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
+ }
+
+ fn variances_data<'a>(
+ self,
+ variances: &'a Self::InternedVariances,
+ ) -> &'a [chalk_ir::Variance] {
+ variances
+ }
+}
+
+impl chalk_ir::interner::HasInterner for Interner {
+ type Interner = Self;
+}
+
+#[macro_export]
+macro_rules! has_interner {
+ ($t:ty) => {
+ impl HasInterner for $t {
+ type Interner = crate::Interner;
+ }
+ };
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
new file mode 100644
index 000000000..5a5d610e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -0,0 +1,525 @@
+//! The type system. We currently use this to infer types for completion, hover
+//! information and various assists.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod autoderef;
+mod builder;
+mod chalk_db;
+mod chalk_ext;
+pub mod consteval;
+mod infer;
+mod interner;
+mod lower;
+mod mapping;
+mod tls;
+mod utils;
+mod walk;
+pub mod db;
+pub mod diagnostics;
+pub mod display;
+pub mod method_resolution;
+pub mod primitive;
+pub mod traits;
+
+#[cfg(test)]
+mod tests;
+#[cfg(test)]
+mod test_db;
+
+use std::sync::Arc;
+
+use chalk_ir::{
+ fold::{Shift, TypeFoldable},
+ interner::HasInterner,
+ NoSolution,
+};
+use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
+use itertools::Either;
+use utils::Generics;
+
+use crate::{consteval::unknown_const, db::HirDatabase, utils::generics};
+
+pub use autoderef::autoderef;
+pub use builder::{ParamKind, TyBuilder};
+pub use chalk_ext::*;
+pub use infer::{
+ could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
+ InferenceResult,
+};
+pub use interner::Interner;
+pub use lower::{
+ associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
+ TyLoweringContext, ValueTyDefId,
+};
+pub use mapping::{
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
+ lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
+ to_placeholder_idx,
+};
+pub use traits::TraitEnvironment;
+pub use utils::{all_super_traits, is_fn_unsafe_to_call};
+pub use walk::TypeWalk;
+
+pub use chalk_ir::{
+ cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
+};
+
+pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
+pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub type FnDefId = chalk_ir::FnDefId<Interner>;
+pub type ClosureId = chalk_ir::ClosureId<Interner>;
+pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
+pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
+
+pub type VariableKind = chalk_ir::VariableKind<Interner>;
+pub type VariableKinds = chalk_ir::VariableKinds<Interner>;
+pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
+pub type Binders<T> = chalk_ir::Binders<T>;
+pub type Substitution = chalk_ir::Substitution<Interner>;
+pub type GenericArg = chalk_ir::GenericArg<Interner>;
+pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
+
+pub type Ty = chalk_ir::Ty<Interner>;
+pub type TyKind = chalk_ir::TyKind<Interner>;
+pub type DynTy = chalk_ir::DynTy<Interner>;
+pub type FnPointer = chalk_ir::FnPointer<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+pub use chalk_ir::FnSubst;
+pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
+pub type AliasTy = chalk_ir::AliasTy<Interner>;
+pub type OpaqueTy = chalk_ir::OpaqueTy<Interner>;
+pub type InferenceVar = chalk_ir::InferenceVar;
+
+pub type Lifetime = chalk_ir::Lifetime<Interner>;
+pub type LifetimeData = chalk_ir::LifetimeData<Interner>;
+pub type LifetimeOutlives = chalk_ir::LifetimeOutlives<Interner>;
+
+pub type Const = chalk_ir::Const<Interner>;
+pub type ConstData = chalk_ir::ConstData<Interner>;
+pub type ConstValue = chalk_ir::ConstValue<Interner>;
+pub type ConcreteConst = chalk_ir::ConcreteConst<Interner>;
+
+pub type ChalkTraitId = chalk_ir::TraitId<Interner>;
+pub type TraitRef = chalk_ir::TraitRef<Interner>;
+pub type QuantifiedWhereClause = Binders<WhereClause>;
+pub type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses<Interner>;
+pub type Canonical<T> = chalk_ir::Canonical<T>;
+
+pub type FnSig = chalk_ir::FnSig<Interner>;
+
+pub type InEnvironment<T> = chalk_ir::InEnvironment<T>;
+pub type Environment = chalk_ir::Environment<Interner>;
+pub type DomainGoal = chalk_ir::DomainGoal<Interner>;
+pub type Goal = chalk_ir::Goal<Interner>;
+pub type AliasEq = chalk_ir::AliasEq<Interner>;
+pub type Solution = chalk_solve::Solution<Interner>;
+pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
+pub type Guidance = chalk_solve::Guidance<Interner>;
+pub type WhereClause = chalk_ir::WhereClause<Interner>;
+
+// FIXME: get rid of this
+pub fn subst_prefix(s: &Substitution, n: usize) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ s.as_slice(Interner)[..std::cmp::min(s.len(Interner), n)].iter().cloned(),
+ )
+}
+
+/// Return an index of a parameter in the generic type parameter list by it's id.
+pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
+ generics(db.upcast(), id.parent).param_idx(id)
+}
+
+pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
+where
+ T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
+{
+ Binders::empty(Interner, value.shifted_in_from(Interner, DebruijnIndex::ONE))
+}
+
+pub(crate) fn make_type_and_const_binders<T: HasInterner<Interner = Interner>>(
+ which_is_const: impl Iterator<Item = Option<Ty>>,
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ which_is_const.map(|x| {
+ if let Some(ty) = x {
+ chalk_ir::VariableKind::Const(ty)
+ } else {
+ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+ }
+ }),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_single_type_binders<T: HasInterner<Interner = Interner>>(
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ std::iter::once(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ count: usize,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ let it = generics.iter_id().take(count).map(|id| match id {
+ Either::Left(_) => None,
+ Either::Right(id) => Some(db.const_param_ty(id)),
+ });
+ crate::make_type_and_const_binders(it, value)
+}
+
+pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ make_binders_with_count(db, usize::MAX, generics, value)
+}
+
+// FIXME: get rid of this
+pub fn make_canonical<T: HasInterner<Interner = Interner>>(
+ value: T,
+ kinds: impl IntoIterator<Item = TyVariableKind>,
+) -> Canonical<T> {
+ let kinds = kinds.into_iter().map(|tk| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(tk),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
+
+// FIXME: get rid of this, just replace it by FnPointer
+/// A function signature as seen by type inference: Several parameter types and
+/// one return type.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct CallableSig {
+ params_and_return: Arc<[Ty]>,
+ is_varargs: bool,
+}
+
+has_interner!(CallableSig);
+
+/// A polymorphic function signature.
+pub type PolyFnSig = Binders<CallableSig>;
+
+impl CallableSig {
+ pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty, is_varargs: bool) -> CallableSig {
+ params.push(ret);
+ CallableSig { params_and_return: params.into(), is_varargs }
+ }
+
+ pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
+ CallableSig {
+ // FIXME: what to do about lifetime params? -> return PolyFnSig
+ params_and_return: fn_ptr
+ .substitution
+ .clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("unexpected lifetime vars in fn ptr")
+ .0
+ .as_slice(Interner)
+ .iter()
+ .map(|arg| arg.assert_ty_ref(Interner).clone())
+ .collect(),
+ is_varargs: fn_ptr.sig.variadic,
+ }
+ }
+
+ pub fn to_fn_ptr(&self) -> FnPointer {
+ FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: Safety::Safe, variadic: self.is_varargs },
+ substitution: FnSubst(Substitution::from_iter(
+ Interner,
+ self.params_and_return.iter().cloned(),
+ )),
+ }
+ }
+
+ pub fn params(&self) -> &[Ty] {
+ &self.params_and_return[0..self.params_and_return.len() - 1]
+ }
+
+ pub fn ret(&self) -> &Ty {
+ &self.params_and_return[self.params_and_return.len() - 1]
+ }
+}
+
+impl TypeFoldable<Interner> for CallableSig {
+ fn fold_with<E>(
+ self,
+ folder: &mut dyn chalk_ir::fold::TypeFolder<Interner, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self, E> {
+ let vec = self.params_and_return.to_vec();
+ let folded = vec.fold_with(folder, outer_binder)?;
+ Ok(CallableSig { params_and_return: folded.into(), is_varargs: self.is_varargs })
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum ImplTraitId {
+ ReturnTypeImplTrait(hir_def::FunctionId, u16),
+ AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ReturnTypeImplTraits {
+ pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
+}
+
+has_interner!(ReturnTypeImplTraits);
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct ReturnTypeImplTrait {
+ pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
+}
+
+pub fn static_lifetime() -> Lifetime {
+ LifetimeData::Static.intern(Interner)
+}
+
+pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty,
+ for_const: impl FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
+) -> T {
+ use chalk_ir::{fold::TypeFolder, Fallible};
+ struct FreeVarFolder<F1, F2>(F1, F2);
+ impl<
+ 'i,
+ F1: FnMut(BoundVar, DebruijnIndex) -> Ty + 'i,
+ F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const + 'i,
+ > TypeFolder<Interner> for FreeVarFolder<F1, F2>
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ Ok(self.0(bound_var, outer_binder))
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ Ok(self.1(ty, bound_var, outer_binder))
+ }
+ }
+ t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST)
+ .expect("fold failed unexpectedly")
+}
+
+pub(crate) fn fold_tys<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ mut for_ty: impl FnMut(Ty, DebruijnIndex) -> Ty,
+ binders: DebruijnIndex,
+) -> T {
+ fold_tys_and_consts(
+ t,
+ |x, d| match x {
+ Either::Left(x) => Either::Left(for_ty(x, d)),
+ Either::Right(x) => Either::Right(x),
+ },
+ binders,
+ )
+}
+
+pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ f: impl FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>,
+ binders: DebruijnIndex,
+) -> T {
+ use chalk_ir::{
+ fold::{TypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct TyFolder<F>(F);
+ impl<'i, F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const> + 'i>
+ TypeFolder<Interner> for TyFolder<F>
+ {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?;
+ Ok(self.0(Either::Left(ty), outer_binder).left().unwrap())
+ }
+
+ fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Fallible<Const> {
+ Ok(self.0(Either::Right(c), outer_binder).right().unwrap())
+ }
+ }
+ t.fold_with(&mut TyFolder(f), binders).expect("fold failed unexpectedly")
+}
+
+/// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
+/// ensures there are no unbound variables or inference variables anywhere in
+/// the `t`.
+pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
+where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
+ T: HasInterner<Interner = Interner>,
+{
+ use chalk_ir::{
+ fold::{TypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct ErrorReplacer {
+ vars: usize,
+ }
+ impl TypeFolder<Interner> for ErrorReplacer {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ if let TyKind::Error = ty.kind(Interner) {
+ let index = self.vars;
+ self.vars += 1;
+ Ok(TyKind::BoundVar(BoundVar::new(outer_binder, index)).intern(Interner))
+ } else {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?;
+ Ok(ty)
+ }
+ }
+
+ fn fold_inference_ty(
+ &mut self,
+ _var: InferenceVar,
+ _kind: TyVariableKind,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn fold_free_var_ty(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn fold_inference_const(
+ &mut self,
+ ty: Ty,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+
+ fn fold_free_var_lifetime(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+ }
+ let mut error_replacer = ErrorReplacer { vars: 0 };
+ let value = match t.clone().fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) {
+ Ok(t) => t,
+ Err(_) => panic!("Encountered unbound or inference vars in {:?}", t),
+ };
+ let kinds = (0..error_replacer.vars).map(|_| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(TyVariableKind::General),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
new file mode 100644
index 000000000..3ed9c941f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -0,0 +1,1778 @@
+//! Methods for lowering the HIR to types. There are two main cases here:
+//!
+//! - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
+//! type: The entry point for this is `Ty::from_hir`.
+//! - Building the type for an item: This happens through the `type_for_def` query.
+//!
+//! This usually involves resolving names, collecting generic arguments etc.
+use std::{
+ cell::{Cell, RefCell},
+ iter,
+ sync::Arc,
+};
+
+use base_db::CrateId;
+use chalk_ir::{
+ cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
+};
+
+use hir_def::{
+ adt::StructKind,
+ body::{Expander, LowerCtx},
+ builtin_type::BuiltinType,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ lang_item::lang_attr,
+ path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
+ resolver::{HasResolver, Resolver, TypeNs},
+ type_ref::{
+ ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
+ },
+ AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
+ HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::Name, ExpandResult};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashSet;
+use smallvec::SmallVec;
+use stdx::{impl_from, never};
+use syntax::{ast, SmolStr};
+
+use crate::{
+ all_super_traits,
+ consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
+ db::HirDatabase,
+ make_binders,
+ mapping::ToChalk,
+ static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
+ utils::Generics,
+ utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics},
+ AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnPointer,
+ FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
+ QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
+ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
+};
+
+#[derive(Debug)]
+pub struct TyLoweringContext<'a> {
+ pub db: &'a dyn HirDatabase,
+ pub resolver: &'a Resolver,
+ in_binders: DebruijnIndex,
+ /// Note: Conceptually, it's thinkable that we could be in a location where
+ /// some type params should be represented as placeholders, and others
+ /// should be converted to variables. I think in practice, this isn't
+ /// possible currently, so this should be fine for now.
+ pub type_param_mode: ParamLoweringMode,
+ pub impl_trait_mode: ImplTraitLoweringMode,
+ impl_trait_counter: Cell<u16>,
+ /// When turning `impl Trait` into opaque types, we have to collect the
+ /// bounds at the same time to get the IDs correct (without becoming too
+ /// complicated). I don't like using interior mutability (as for the
+ /// counter), but I've tried and failed to make the lifetimes work for
+ /// passing around a `&mut TyLoweringContext`. The core problem is that
+ /// we're grouping the mutable data (the counter and this field) together
+ /// with the immutable context (the references to the DB and resolver).
+ /// Splitting this up would be a possible fix.
+ opaque_type_data: RefCell<Vec<ReturnTypeImplTrait>>,
+ expander: RefCell<Option<Expander>>,
+ /// Tracks types with explicit `?Sized` bounds.
+ pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
+ let impl_trait_counter = Cell::new(0);
+ let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
+ let type_param_mode = ParamLoweringMode::Placeholder;
+ let in_binders = DebruijnIndex::INNERMOST;
+ let opaque_type_data = RefCell::new(Vec::new());
+ Self {
+ db,
+ resolver,
+ in_binders,
+ impl_trait_mode,
+ impl_trait_counter,
+ type_param_mode,
+ opaque_type_data,
+ expander: RefCell::new(None),
+ unsized_types: RefCell::default(),
+ }
+ }
+
+ pub fn with_debruijn<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ let opaque_ty_data_vec = self.opaque_type_data.take();
+ let expander = self.expander.take();
+ let unsized_types = self.unsized_types.take();
+ let new_ctx = Self {
+ in_binders: debruijn,
+ impl_trait_counter: Cell::new(self.impl_trait_counter.get()),
+ opaque_type_data: RefCell::new(opaque_ty_data_vec),
+ expander: RefCell::new(expander),
+ unsized_types: RefCell::new(unsized_types),
+ ..*self
+ };
+ let result = f(&new_ctx);
+ self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
+ self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
+ self.expander.replace(new_ctx.expander.into_inner());
+ self.unsized_types.replace(new_ctx.unsized_types.into_inner());
+ result
+ }
+
+ pub fn with_shifted_in<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
+ }
+
+ pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
+ Self { impl_trait_mode, ..self }
+ }
+
+ pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
+ Self { type_param_mode, ..self }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ImplTraitLoweringMode {
+ /// `impl Trait` gets lowered into an opaque type that doesn't unify with
+ /// anything except itself. This is used in places where values flow 'out',
+ /// i.e. for arguments of the function we're currently checking, and return
+ /// types of functions we're calling.
+ Opaque,
+ /// `impl Trait` gets lowered into a type variable. Used for argument
+ /// position impl Trait when inside the respective function, since it allows
+ /// us to support that without Chalk.
+ Param,
+ /// `impl Trait` gets lowered into a variable that can unify with some
+ /// type. This is used in places where values flow 'in', i.e. for arguments
+ /// of functions we're calling, and the return type of the function we're
+ /// currently checking.
+ Variable,
+ /// `impl Trait` is disallowed and will be an error.
+ Disallowed,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ParamLoweringMode {
+ Placeholder,
+ Variable,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn lower_ty(&self, type_ref: &TypeRef) -> Ty {
+ self.lower_ty_ext(type_ref).0
+ }
+
+ fn generics(&self) -> Generics {
+ generics(
+ self.db.upcast(),
+ self.resolver
+ .generic_def()
+ .expect("there should be generics if there's a generic param"),
+ )
+ }
+
+ pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
+ let mut res = None;
+ let ty = match type_ref {
+ TypeRef::Never => TyKind::Never.intern(Interner),
+ TypeRef::Tuple(inner) => {
+ let inner_tys = inner.iter().map(|tr| self.lower_ty(tr));
+ TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
+ .intern(Interner)
+ }
+ TypeRef::Path(path) => {
+ let (ty, res_) = self.lower_path(path);
+ res = res_;
+ ty
+ }
+ TypeRef::RawPtr(inner, mutability) => {
+ let inner_ty = self.lower_ty(inner);
+ TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(Interner)
+ }
+ TypeRef::Array(inner, len) => {
+ let inner_ty = self.lower_ty(inner);
+ let const_len = const_or_path_to_chalk(
+ self.db,
+ self.resolver,
+ TyBuilder::usize(),
+ len,
+ self.type_param_mode,
+ || self.generics(),
+ self.in_binders,
+ );
+
+ TyKind::Array(inner_ty, const_len).intern(Interner)
+ }
+ TypeRef::Slice(inner) => {
+ let inner_ty = self.lower_ty(inner);
+ TyKind::Slice(inner_ty).intern(Interner)
+ }
+ TypeRef::Reference(inner, _, mutability) => {
+ let inner_ty = self.lower_ty(inner);
+ let lifetime = static_lifetime();
+ TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
+ .intern(Interner)
+ }
+ TypeRef::Placeholder => TyKind::Error.intern(Interner),
+ TypeRef::Fn(params, is_varargs) => {
+ let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
+ });
+ TyKind::Function(FnPointer {
+ num_binders: 0, // FIXME lower `for<'a> fn()` correctly
+ sig: FnSig { abi: (), safety: Safety::Safe, variadic: *is_varargs },
+ substitution: FnSubst(substs),
+ })
+ .intern(Interner)
+ }
+ TypeRef::DynTrait(bounds) => {
+ let self_ty =
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+ let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ QuantifiedWhereClauses::from_iter(
+ Interner,
+ bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
+ )
+ });
+ let bounds = crate::make_single_type_binders(bounds);
+ TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
+ }
+ TypeRef::ImplTrait(bounds) => {
+ match self.impl_trait_mode {
+ ImplTraitLoweringMode::Opaque => {
+ let idx = self.impl_trait_counter.get();
+ self.impl_trait_counter.set(idx + 1);
+ let func = match self.resolver.generic_def() {
+ Some(GenericDefId::FunctionId(f)) => f,
+ _ => panic!("opaque impl trait lowering in non-function"),
+ };
+
+ assert!(idx as usize == self.opaque_type_data.borrow().len());
+ // this dance is to make sure the data is in the right
+ // place even if we encounter more opaque types while
+ // lowering the bounds
+ self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait {
+ bounds: crate::make_single_type_binders(Vec::new()),
+ });
+ // We don't want to lower the bounds inside the binders
+ // we're currently in, because they don't end up inside
+ // those binders. E.g. when we have `impl Trait<impl
+ // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
+ // to the self parameter from `impl Trait`, and the
+ // bounds aren't actually stored nested within each
+ // other, but separately. So if the `T` refers to a type
+ // parameter of the outer function, it's just one binder
+ // away instead of two.
+ let actual_opaque_type_data = self
+ .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
+ ctx.lower_impl_trait(bounds, func)
+ });
+ self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
+
+ let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ let generics = generics(self.db.upcast(), func.into());
+ let parameters = generics.bound_vars_subst(self.db, self.in_binders);
+ TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
+ }
+ ImplTraitLoweringMode::Param => {
+ let idx = self.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ if let Some(def) = self.resolver.generic_def() {
+ let generics = generics(self.db.upcast(), def);
+ let param = generics
+ .iter()
+ .filter(|(_, data)| {
+ matches!(
+ data,
+ TypeOrConstParamData::TypeParamData(data)
+ if data.provenance == TypeParamProvenance::ArgumentImplTrait
+ )
+ })
+ .nth(idx as usize)
+ .map_or(TyKind::Error, |(id, _)| {
+ TyKind::Placeholder(to_placeholder_idx(self.db, id))
+ });
+ param.intern(Interner)
+ } else {
+ TyKind::Error.intern(Interner)
+ }
+ }
+ ImplTraitLoweringMode::Variable => {
+ let idx = self.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ let (
+ parent_params,
+ self_params,
+ list_params,
+ const_params,
+ _impl_trait_params,
+ ) = if let Some(def) = self.resolver.generic_def() {
+ let generics = generics(self.db.upcast(), def);
+ generics.provenance_split()
+ } else {
+ (0, 0, 0, 0, 0)
+ };
+ TyKind::BoundVar(BoundVar::new(
+ self.in_binders,
+ idx as usize + parent_params + self_params + list_params + const_params,
+ ))
+ .intern(Interner)
+ }
+ ImplTraitLoweringMode::Disallowed => {
+ // FIXME: report error
+ TyKind::Error.intern(Interner)
+ }
+ }
+ }
+ TypeRef::Macro(macro_call) => {
+ let (expander, recursion_start) = {
+ let mut expander = self.expander.borrow_mut();
+ if expander.is_some() {
+ (Some(expander), false)
+ } else {
+ *expander = Some(Expander::new(
+ self.db.upcast(),
+ macro_call.file_id,
+ self.resolver.module(),
+ ));
+ (Some(expander), true)
+ }
+ };
+ let ty = if let Some(mut expander) = expander {
+ let expander_mut = expander.as_mut().unwrap();
+ let macro_call = macro_call.to_node(self.db.upcast());
+ match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
+ Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
+ let ctx =
+ LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
+ let type_ref = TypeRef::from_ast(&ctx, expanded);
+
+ drop(expander);
+ let ty = self.lower_ty(&type_ref);
+
+ self.expander
+ .borrow_mut()
+ .as_mut()
+ .unwrap()
+ .exit(self.db.upcast(), mark);
+ Some(ty)
+ }
+ _ => None,
+ }
+ } else {
+ None
+ };
+ if recursion_start {
+ *self.expander.borrow_mut() = None;
+ }
+ ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+ }
+ TypeRef::Error => TyKind::Error.intern(Interner),
+ };
+ (ty, res)
+ }
+
+    /// This is only for `generic_predicates_for_param`, where we can't just
+    /// lower the self types of the predicates since that could lead to cycles.
+    /// So we just check here if the `type_ref` resolves to a generic param, and which.
+    fn lower_ty_only_param(&self, type_ref: &TypeRef) -> Option<TypeOrConstParamId> {
+        // Only a bare path can possibly name a generic parameter.
+        let path = match type_ref {
+            TypeRef::Path(path) => path,
+            _ => return None,
+        };
+        // Qualified (`<T as Trait>::...`) or multi-segment paths can't be a
+        // plain parameter reference.
+        if path.type_anchor().is_some() {
+            return None;
+        }
+        if path.segments().len() > 1 {
+            return None;
+        }
+        // Require a full resolution (no unresolved remaining segments).
+        let resolution =
+            match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+                Some((it, None)) => it,
+                _ => return None,
+            };
+        match resolution {
+            TypeNs::GenericParam(param_id) => Some(param_id.into()),
+            _ => None,
+        }
+    }
+
+    /// Lowers the segments of a path that remain after a base type `ty` has
+    /// already been determined (e.g. from a type anchor or a resolved prefix).
+    /// With zero remaining segments the base type is returned unchanged; with
+    /// exactly one, it is treated as an associated-type selection.
+    pub(crate) fn lower_ty_relative_path(
+        &self,
+        ty: Ty,
+        // We need the original resolution to lower `Self::AssocTy` correctly
+        res: Option<TypeNs>,
+        remaining_segments: PathSegments<'_>,
+    ) -> (Ty, Option<TypeNs>) {
+        match remaining_segments.len() {
+            0 => (ty, res),
+            1 => {
+                // resolve unselected assoc types
+                let segment = remaining_segments.first().unwrap();
+                (self.select_associated_type(res, segment), None)
+            }
+            _ => {
+                // FIXME report error (ambiguous associated type)
+                (TyKind::Error.intern(Interner), None)
+            }
+        }
+    }
+
+    /// Lowers a path whose first portion has already been resolved to
+    /// `resolution`, turning it into a `Ty`. `resolved_segment` is the segment
+    /// that produced `resolution` (it may carry generic args); any
+    /// `remaining_segments` are handed off to `lower_ty_relative_path`.
+    pub(crate) fn lower_partly_resolved_path(
+        &self,
+        resolution: TypeNs,
+        resolved_segment: PathSegment<'_>,
+        remaining_segments: PathSegments<'_>,
+        infer_args: bool,
+    ) -> (Ty, Option<TypeNs>) {
+        let ty = match resolution {
+            TypeNs::TraitId(trait_) => {
+                let ty = match remaining_segments.len() {
+                    1 => {
+                        // `Trait::AssocTy` — project to the named associated type.
+                        let trait_ref =
+                            self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
+                        let segment = remaining_segments.first().unwrap();
+                        let found = self
+                            .db
+                            .trait_data(trait_ref.hir_trait_id())
+                            .associated_type_by_name(segment.name);
+                        match found {
+                            Some(associated_ty) => {
+                                // FIXME handle type parameters on the segment
+                                TyKind::Alias(AliasTy::Projection(ProjectionTy {
+                                    associated_ty_id: to_assoc_type_id(associated_ty),
+                                    substitution: trait_ref.substitution,
+                                }))
+                                .intern(Interner)
+                            }
+                            None => {
+                                // FIXME: report error (associated type not found)
+                                TyKind::Error.intern(Interner)
+                            }
+                        }
+                    }
+                    0 => {
+                        // A bare trait in type position lowers to a trait
+                        // object (`dyn Trait`). The self type is the bound
+                        // variable introduced by the dyn binder.
+                        let self_ty = Some(
+                            TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+                                .intern(Interner),
+                        );
+                        // Shift in one binder level for the dyn's own binder.
+                        let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+                            ctx.lower_trait_ref_from_resolved_path(
+                                trait_,
+                                resolved_segment,
+                                self_ty,
+                            )
+                        });
+                        let dyn_ty = DynTy {
+                            bounds: crate::make_single_type_binders(
+                                QuantifiedWhereClauses::from_iter(
+                                    Interner,
+                                    Some(crate::wrap_empty_binders(WhereClause::Implemented(
+                                        trait_ref,
+                                    ))),
+                                ),
+                            ),
+                            lifetime: static_lifetime(),
+                        };
+                        TyKind::Dyn(dyn_ty).intern(Interner)
+                    }
+                    _ => {
+                        // FIXME report error (ambiguous associated type)
+                        TyKind::Error.intern(Interner)
+                    }
+                };
+                // Trait paths fully consume the remaining segments above, so
+                // return directly instead of falling through.
+                return (ty, None);
+            }
+            TypeNs::GenericParam(param_id) => {
+                let generics = generics(
+                    self.db.upcast(),
+                    self.resolver.generic_def().expect("generics in scope"),
+                );
+                // Represent the parameter per the current lowering mode:
+                // placeholders for "inside the item", bound vars for binders.
+                match self.type_param_mode {
+                    ParamLoweringMode::Placeholder => {
+                        TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
+                    }
+                    ParamLoweringMode::Variable => {
+                        let idx = generics.param_idx(param_id.into()).expect("matching generics");
+                        TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
+                    }
+                }
+                .intern(Interner)
+            }
+            TypeNs::SelfType(impl_id) => {
+                // `Self` inside an impl: substitute the impl's self type.
+                let generics = generics(self.db.upcast(), impl_id.into());
+                let substs = match self.type_param_mode {
+                    ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
+                    ParamLoweringMode::Variable => {
+                        generics.bound_vars_subst(self.db, self.in_binders)
+                    }
+                };
+                self.db.impl_self_ty(impl_id).substitute(Interner, &substs)
+            }
+            TypeNs::AdtSelfType(adt) => {
+                // `Self` inside an ADT definition.
+                let generics = generics(self.db.upcast(), adt.into());
+                let substs = match self.type_param_mode {
+                    ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
+                    ParamLoweringMode::Variable => {
+                        generics.bound_vars_subst(self.db, self.in_binders)
+                    }
+                };
+                self.db.ty(adt.into()).substitute(Interner, &substs)
+            }
+
+            TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
+            TypeNs::BuiltinType(it) => {
+                self.lower_path_inner(resolved_segment, it.into(), infer_args)
+            }
+            TypeNs::TypeAliasId(it) => {
+                self.lower_path_inner(resolved_segment, it.into(), infer_args)
+            }
+            // FIXME: report error
+            TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
+        };
+        self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
+    }
+
+    /// Lowers a type path: resolves it in the type namespace and dispatches to
+    /// `lower_partly_resolved_path` (or, for `<T>::Foo`-style anchored paths,
+    /// to `lower_ty_relative_path` on the anchor's lowered type).
+    pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
+        // Resolve the path (in type namespace)
+        if let Some(type_ref) = path.type_anchor() {
+            let (ty, res) = self.lower_ty_ext(type_ref);
+            return self.lower_ty_relative_path(ty, res, path.segments());
+        }
+        let (resolution, remaining_index) =
+            match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+                Some(it) => it,
+                None => return (TyKind::Error.intern(Interner), None),
+            };
+        // `remaining_index` is the index of the first *unresolved* segment;
+        // `None` means the whole path resolved.
+        let (resolved_segment, remaining_segments) = match remaining_index {
+            None => (
+                path.segments().last().expect("resolved path has at least one element"),
+                PathSegments::EMPTY,
+            ),
+            Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
+        };
+        self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)
+    }
+
+    /// Resolves an associated-type shorthand like `T::Item` (where `res` is
+    /// the resolution of `T`) by searching the candidate traits' associated
+    /// types for one matching the segment name. Falls back to `Error` if no
+    /// candidate matches or there are no generics/resolution to work with.
+    fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
+        let (def, res) = match (self.resolver.generic_def(), res) {
+            (Some(def), Some(res)) => (def, res),
+            _ => return TyKind::Error.intern(Interner),
+        };
+        let ty = named_associated_type_shorthand_candidates(
+            self.db,
+            def,
+            res,
+            Some(segment.name.clone()),
+            move |name, t, associated_ty| {
+                if name == segment.name {
+                    let substs = match self.type_param_mode {
+                        ParamLoweringMode::Placeholder => {
+                            // if we're lowering to placeholders, we have to put
+                            // them in now
+                            let generics = generics(
+                                self.db.upcast(),
+                                self.resolver
+                                    .generic_def()
+                                    .expect("there should be generics if there's a generic param"),
+                            );
+                            let s = generics.placeholder_subst(self.db);
+                            s.apply(t.substitution.clone(), Interner)
+                        }
+                        ParamLoweringMode::Variable => t.substitution.clone(),
+                    };
+                    // We need to shift in the bound vars, since
+                    // associated_type_shorthand_candidates does not do that
+                    let substs = substs.shifted_in_from(Interner, self.in_binders);
+                    // FIXME handle type parameters on the segment
+                    Some(
+                        TyKind::Alias(AliasTy::Projection(ProjectionTy {
+                            associated_ty_id: to_assoc_type_id(associated_ty),
+                            substitution: substs,
+                        }))
+                        .intern(Interner),
+                    )
+                } else {
+                    None
+                }
+            },
+        );
+
+        ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
+    }
+
+    /// Lowers a path segment that resolved to a type definition (`TyDefId`) by
+    /// building a substitution from the segment's generic args and applying it
+    /// to the definition's declared type.
+    fn lower_path_inner(
+        &self,
+        segment: PathSegment<'_>,
+        typeable: TyDefId,
+        infer_args: bool,
+    ) -> Ty {
+        // Builtin types have no generics, so no generic def to thread through.
+        let generic_def = match typeable {
+            TyDefId::BuiltinType(_) => None,
+            TyDefId::AdtId(it) => Some(it.into()),
+            TyDefId::TypeAliasId(it) => Some(it.into()),
+        };
+        let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None);
+        self.db.ty(typeable).substitute(Interner, &substs)
+    }
+
+    /// Collect generic arguments from a path into a `Substs`. See also
+    /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+    pub(super) fn substs_from_path(
+        &self,
+        path: &Path,
+        // Note that we don't call `db.value_type(resolved)` here,
+        // `ValueTyDefId` is just a convenient way to pass generics and
+        // special-case enum variants
+        resolved: ValueTyDefId,
+        infer_args: bool,
+    ) -> Substitution {
+        let last = path.segments().last().expect("path should have at least one segment");
+        // Pick the segment whose generic args apply, and the generic def they
+        // are checked against.
+        let (segment, generic_def) = match resolved {
+            ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
+            ValueTyDefId::StructId(it) => (last, Some(it.into())),
+            ValueTyDefId::UnionId(it) => (last, Some(it.into())),
+            ValueTyDefId::ConstId(it) => (last, Some(it.into())),
+            ValueTyDefId::StaticId(_) => (last, None),
+            ValueTyDefId::EnumVariantId(var) => {
+                // the generic args for an enum variant may be either specified
+                // on the segment referring to the enum, or on the segment
+                // referring to the variant. So `Option::<T>::None` and
+                // `Option::None::<T>` are both allowed (though the former is
+                // preferred). See also `def_ids_for_path_segments` in rustc.
+                let len = path.segments().len();
+                let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
+                let segment = match penultimate {
+                    Some(segment) if segment.args_and_bindings.is_some() => segment,
+                    _ => last,
+                };
+                (segment, Some(var.parent.into()))
+            }
+        };
+        self.substs_from_path_segment(segment, generic_def, infer_args, None)
+    }
+
+    /// Builds a `Substitution` for `def_generic` from the explicit generic
+    /// args on `segment`, filling in (in order) parent params, the implicit
+    /// `Self` param, explicit args, defaults (unless `infer_args` applies),
+    /// and error placeholders for anything still missing, so the result always
+    /// has exactly as many entries as the def has parameters.
+    fn substs_from_path_segment(
+        &self,
+        segment: PathSegment<'_>,
+        def_generic: Option<GenericDefId>,
+        infer_args: bool,
+        explicit_self_ty: Option<Ty>,
+    ) -> Substitution {
+        let mut substs = Vec::new();
+        let def_generics = if let Some(def) = def_generic {
+            generics(self.db.upcast(), def)
+        } else {
+            return Substitution::empty(Interner);
+        };
+        let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+            def_generics.provenance_split();
+        let total_len =
+            parent_params + self_params + type_params + const_params + impl_trait_params;
+
+        let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner);
+
+        // Iterated in lockstep with `substs`: every push must consume one id
+        // (see the assert at the end).
+        let mut def_generic_iter = def_generics.iter_id();
+
+        // Parent (enclosing item) params are never written in the path; fill
+        // with errors / unknown consts.
+        for _ in 0..parent_params {
+            if let Some(eid) = def_generic_iter.next() {
+                match eid {
+                    Either::Left(_) => substs.push(ty_error.clone()),
+                    Either::Right(x) => {
+                        substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+                    }
+                }
+            }
+        }
+
+        // Fills the `Self` slot(s) from `explicit_self_ty`, padding with
+        // errors if it is absent.
+        let fill_self_params = || {
+            for x in explicit_self_ty
+                .into_iter()
+                .map(|x| GenericArgData::Ty(x).intern(Interner))
+                .chain(iter::repeat(ty_error.clone()))
+                .take(self_params)
+            {
+                if let Some(id) = def_generic_iter.next() {
+                    assert!(id.is_left());
+                    substs.push(x);
+                }
+            }
+        };
+        let mut had_explicit_args = false;
+
+        if let Some(generic_args) = &segment.args_and_bindings {
+            if !generic_args.has_self_type {
+                fill_self_params();
+            }
+            let expected_num = if generic_args.has_self_type {
+                self_params + type_params + const_params
+            } else {
+                type_params + const_params
+            };
+            // A written self type with no self param slot must be skipped.
+            let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
+            // if args are provided, it should be all of them, but we can't rely on that
+            for arg in generic_args
+                .args
+                .iter()
+                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+                .skip(skip)
+                .take(expected_num)
+            {
+                if let Some(id) = def_generic_iter.next() {
+                    if let Some(x) = generic_arg_to_chalk(
+                        self.db,
+                        id,
+                        arg,
+                        &mut (),
+                        |_, type_ref| self.lower_ty(type_ref),
+                        |_, c, ty| {
+                            const_or_path_to_chalk(
+                                self.db,
+                                &self.resolver,
+                                ty,
+                                c,
+                                self.type_param_mode,
+                                || self.generics(),
+                                self.in_binders,
+                            )
+                        },
+                    ) {
+                        had_explicit_args = true;
+                        substs.push(x);
+                    } else {
+                        // we just filtered them out
+                        never!("Unexpected lifetime argument");
+                    }
+                }
+            }
+        } else {
+            fill_self_params();
+        }
+
+        // handle defaults. In expression or pattern path segments without
+        // explicitly specified type arguments, missing type arguments are inferred
+        // (i.e. defaults aren't used).
+        if !infer_args || had_explicit_args {
+            if let Some(def_generic) = def_generic {
+                let defaults = self.db.generic_defaults(def_generic);
+                assert_eq!(total_len, defaults.len());
+
+                for default_ty in defaults.iter().skip(substs.len()) {
+                    // each default can depend on the previous parameters
+                    let substs_so_far = Substitution::from_iter(Interner, substs.clone());
+                    if let Some(_id) = def_generic_iter.next() {
+                        substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
+                    }
+                }
+            }
+        }
+
+        // add placeholders for args that were not provided
+        // FIXME: emit diagnostics in contexts where this is not allowed
+        for eid in def_generic_iter {
+            match eid {
+                Either::Left(_) => substs.push(ty_error.clone()),
+                Either::Right(x) => {
+                    substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+                }
+            }
+        }
+        // If this assert fails, it means you pushed into subst but didn't call .next() of def_generic_iter
+        assert_eq!(substs.len(), total_len);
+
+        Substitution::from_iter(Interner, substs)
+    }
+
+    /// Resolves `path` in the type namespace and lowers it to a `TraitRef`.
+    /// Returns `None` if the path doesn't (fully) resolve to a trait.
+    fn lower_trait_ref_from_path(
+        &self,
+        path: &Path,
+        explicit_self_ty: Option<Ty>,
+    ) -> Option<TraitRef> {
+        let resolved =
+            match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
+                TypeNs::TraitId(tr) => tr,
+                _ => return None,
+            };
+        // Generic args for the trait live on the last path segment.
+        let segment = path.segments().last().expect("path should have at least one segment");
+        Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
+    }
+
+    /// Builds a `TraitRef` for an already-resolved trait, lowering the generic
+    /// args from `segment` (with `explicit_self_ty` as the `Self` argument, if
+    /// given).
+    pub(crate) fn lower_trait_ref_from_resolved_path(
+        &self,
+        resolved: TraitId,
+        segment: PathSegment<'_>,
+        explicit_self_ty: Option<Ty>,
+    ) -> TraitRef {
+        let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
+        TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
+    }
+
+    /// Convenience wrapper: lowers a HIR `TraitRef` via its path.
+    fn lower_trait_ref(
+        &self,
+        trait_ref: &HirTraitRef,
+        explicit_self_ty: Option<Ty>,
+    ) -> Option<TraitRef> {
+        self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
+    }
+
+    /// Builds the substitution for a trait reference from a path segment.
+    /// `infer_args` is always `false` here: trait args are never inferred from
+    /// expression context.
+    fn trait_ref_substs_from_path(
+        &self,
+        segment: PathSegment<'_>,
+        resolved: TraitId,
+        explicit_self_ty: Option<Ty>,
+    ) -> Substitution {
+        self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
+    }
+
+    /// Lowers a single where-predicate to zero or more quantified where
+    /// clauses. Lifetime predicates are currently dropped. If
+    /// `ignore_bindings` is set, associated-type bindings on the bound are
+    /// skipped (used by `generic_predicates_for_param` to avoid cycles).
+    pub(crate) fn lower_where_predicate(
+        &'a self,
+        where_predicate: &'a WherePredicate,
+        ignore_bindings: bool,
+    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+        match where_predicate {
+            WherePredicate::ForLifetime { target, bound, .. }
+            | WherePredicate::TypeBound { target, bound } => {
+                // Lower the predicate's self type: either a written type ref,
+                // or a direct reference to a generic param of the enclosing def.
+                let self_ty = match target {
+                    WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
+                    WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
+                        let generic_def = self.resolver.generic_def().expect("generics in scope");
+                        let generics = generics(self.db.upcast(), generic_def);
+                        let param_id = hir_def::TypeOrConstParamId {
+                            parent: generic_def,
+                            local_id: *param_id,
+                        };
+                        let placeholder = to_placeholder_idx(self.db, param_id);
+                        match self.type_param_mode {
+                            ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
+                            ParamLoweringMode::Variable => {
+                                let idx = generics.param_idx(param_id).expect("matching generics");
+                                TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
+                            }
+                        }
+                        .intern(Interner)
+                    }
+                };
+                // Collect so both match arms yield the same iterator type.
+                self.lower_type_bound(bound, self_ty, ignore_bindings)
+                    .collect::<Vec<_>>()
+                    .into_iter()
+            }
+            WherePredicate::Lifetime { .. } => vec![].into_iter(),
+        }
+    }
+
+    /// Lowers one type bound (`Trait`, `?Trait`, `for<'a> Trait`, lifetime,
+    /// error) applied to `self_ty` into where clauses: the trait-implemented
+    /// clause itself, followed by clauses from its associated-type bindings
+    /// (unless `ignore_bindings`).
+    pub(crate) fn lower_type_bound(
+        &'a self,
+        bound: &'a TypeBound,
+        self_ty: Ty,
+        ignore_bindings: bool,
+    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+        // Kept around after the match so associated-type bindings can be
+        // lowered against the same trait ref.
+        let mut bindings = None;
+        let trait_ref = match bound {
+            TypeBound::Path(path, TraitBoundModifier::None) => {
+                bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+                bindings
+                    .clone()
+                    .filter(|tr| {
+                        // ignore `T: Drop` or `T: Destruct` bounds.
+                        // - `T: ~const Drop` has a special meaning in Rust 1.61 that we don't implement.
+                        //   (So ideally, we'd only ignore `~const Drop` here)
+                        // - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
+                        //   the builtin impls are supported by Chalk, we ignore them here.
+                        if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+                            if lang == "drop" || lang == "destruct" {
+                                return false;
+                            }
+                        }
+                        true
+                    })
+                    .map(WhereClause::Implemented)
+                    .map(crate::wrap_empty_binders)
+            }
+            TypeBound::Path(path, TraitBoundModifier::Maybe) => {
+                let sized_trait = self
+                    .db
+                    .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
+                    .and_then(|lang_item| lang_item.as_trait());
+                // Don't lower associated type bindings as the only possible relaxed trait bound
+                // `?Sized` has no of them.
+                // If we got another trait here ignore the bound completely.
+                let trait_id = self
+                    .lower_trait_ref_from_path(path, Some(self_ty.clone()))
+                    .map(|trait_ref| trait_ref.hir_trait_id());
+                if trait_id == sized_trait {
+                    // Record the opt-out so implicit `Sized` clauses skip this type.
+                    self.unsized_types.borrow_mut().insert(self_ty);
+                }
+                None
+            }
+            TypeBound::ForLifetime(_, path) => {
+                // FIXME Don't silently drop the hrtb lifetimes here
+                bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+                bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+            }
+            TypeBound::Lifetime(_) => None,
+            TypeBound::Error => None,
+        };
+        trait_ref.into_iter().chain(
+            bindings
+                .into_iter()
+                .filter(move |_| !ignore_bindings)
+                .flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
+        )
+    }
+
+    /// Lowers the associated-type bindings of a bound (`Trait<Assoc = T>` /
+    /// `Trait<Assoc: Bound>`) into `AliasEq` and further where clauses, given
+    /// the already-lowered `trait_ref` for the bound.
+    fn assoc_type_bindings_from_type_bound(
+        &'a self,
+        bound: &'a TypeBound,
+        trait_ref: TraitRef,
+    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+        // Bindings can only appear on the last segment of a (possibly HRTB)
+        // trait path.
+        let last_segment = match bound {
+            TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
+                path.segments().last()
+            }
+            TypeBound::Path(_, TraitBoundModifier::Maybe)
+            | TypeBound::Error
+            | TypeBound::Lifetime(_) => None,
+        };
+        last_segment
+            .into_iter()
+            .filter_map(|segment| segment.args_and_bindings)
+            .flat_map(|args_and_bindings| &args_and_bindings.bindings)
+            .flat_map(move |binding| {
+                // The associated type may come from a supertrait, so search
+                // the whole super-trait hierarchy.
+                let found = associated_type_by_name_including_super_traits(
+                    self.db,
+                    trait_ref.clone(),
+                    &binding.name,
+                );
+                let (super_trait_ref, associated_ty) = match found {
+                    None => return SmallVec::new(),
+                    Some(t) => t,
+                };
+                let projection_ty = ProjectionTy {
+                    associated_ty_id: to_assoc_type_id(associated_ty),
+                    substitution: super_trait_ref.substitution,
+                };
+                let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
+                    binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
+                );
+                // `Assoc = T` becomes an `AliasEq` clause.
+                if let Some(type_ref) = &binding.type_ref {
+                    let ty = self.lower_ty(type_ref);
+                    let alias_eq =
+                        AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+                    preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+                }
+                // `Assoc: Bound` lowers recursively with the projection as self type.
+                for bound in &binding.bounds {
+                    preds.extend(self.lower_type_bound(
+                        bound,
+                        TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
+                        false,
+                    ));
+                }
+                preds
+            })
+    }
+
+    /// Lowers the bounds of a return-position `impl Trait` into a
+    /// `ReturnTypeImplTrait`. The opaque type's self type is a bound variable
+    /// under one extra binder; an implicit `Sized` bound is added unless the
+    /// written bounds opted out with `?Sized`.
+    fn lower_impl_trait(
+        &self,
+        bounds: &[Interned<TypeBound>],
+        func: FunctionId,
+    ) -> ReturnTypeImplTrait {
+        cov_mark::hit!(lower_rpit);
+        let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+        // Bounds are lowered inside the opaque type's own binder.
+        let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+            let mut predicates: Vec<_> = bounds
+                .iter()
+                .flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
+                .collect();
+
+            if !ctx.unsized_types.borrow().contains(&self_ty) {
+                let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
+                let sized_trait = ctx
+                    .db
+                    .lang_item(krate, SmolStr::new_inline("sized"))
+                    .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+                let sized_clause = sized_trait.map(|trait_id| {
+                    let clause = WhereClause::Implemented(TraitRef {
+                        trait_id,
+                        substitution: Substitution::from1(Interner, self_ty.clone()),
+                    });
+                    crate::wrap_empty_binders(clause)
+                });
+                predicates.extend(sized_clause.into_iter());
+                predicates.shrink_to_fit();
+            }
+            predicates
+        });
+        ReturnTypeImplTrait { bounds: crate::make_single_type_binders(predicates) }
+    }
+}
+
+/// Counts the number of `impl Trait` occurrences anywhere within `type_ref`
+/// (the walk recurses into nested type refs).
+fn count_impl_traits(type_ref: &TypeRef) -> usize {
+    let mut count = 0;
+    type_ref.walk(&mut |type_ref| {
+        if matches!(type_ref, TypeRef::ImplTrait(_)) {
+            count += 1;
+        }
+    });
+    count
+}
+
+/// Build the signature of a callable item (function, struct or enum variant).
+pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
+    match def {
+        CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
+        CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
+        CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
+    }
+}
+
+/// Public entry point for enumerating the associated types reachable from a
+/// resolution `res` (e.g. for `T::Assoc` shorthand completion); calls `cb` for
+/// each candidate until it returns `Some`. Unlike the `named_` variant, no
+/// name filter is applied.
+pub fn associated_type_shorthand_candidates<R>(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    res: TypeNs,
+    cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+    named_associated_type_shorthand_candidates(db, def, res, None, cb)
+}
+
+/// Enumerates associated-type candidates for an associated-type shorthand
+/// (`T::Assoc`): walks all super trait refs of the traits bounding `res` and
+/// calls `cb` for each associated type alias found, short-circuiting on
+/// `Some`. `assoc_name` is only used to pre-filter the predicates queried for
+/// a generic param.
+fn named_associated_type_shorthand_candidates<R>(
+    db: &dyn HirDatabase,
+    // If the type parameter is defined in an impl and we're in a method, there
+    // might be additional where clauses to consider
+    def: GenericDefId,
+    res: TypeNs,
+    assoc_name: Option<Name>,
+    mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+    // Visits every type-alias item of `t` and all its super traits.
+    let mut search = |t| {
+        for t in all_super_trait_refs(db, t) {
+            let data = db.trait_data(t.hir_trait_id());
+
+            for (name, assoc_id) in &data.items {
+                if let AssocItemId::TypeAliasId(alias) = assoc_id {
+                    if let Some(result) = cb(name, &t, *alias) {
+                        return Some(result);
+                    }
+                }
+            }
+        }
+        None
+    };
+
+    match res {
+        TypeNs::SelfType(impl_id) => search(
+            // we're _in_ the impl -- the binders get added back later. Correct,
+            // but it would be nice to make this more explicit
+            db.impl_trait(impl_id)?.into_value_and_skipped_binders().0,
+        ),
+        TypeNs::GenericParam(param_id) => {
+            // Candidates come from the param's trait bounds...
+            let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name);
+            let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() {
+                // FIXME: how to correctly handle higher-ranked bounds here?
+                WhereClause::Implemented(tr) => search(
+                    tr.clone()
+                        .shifted_out_to(Interner, DebruijnIndex::ONE)
+                        .expect("FIXME unexpected higher-ranked trait bound"),
+                ),
+                _ => None,
+            });
+            if let Some(_) = res {
+                return res;
+            }
+            // Handle `Self::Type` referring to own associated type in trait definitions
+            if let GenericDefId::TraitId(trait_id) = param_id.parent() {
+                let generics = generics(db.upcast(), trait_id.into());
+                if generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
+                    let trait_ref = TyBuilder::trait_ref(db, trait_id)
+                        .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
+                        .build();
+                    return search(trait_ref);
+                }
+            }
+            None
+        }
+        _ => None,
+    }
+}
+
+/// Build the type of all specific fields of a struct or enum variant.
+/// Salsa query: returns each field's type wrapped in binders for the
+/// variant's (or parent enum's) generics.
+pub(crate) fn field_types_query(
+    db: &dyn HirDatabase,
+    variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
+    let var_data = variant_id.variant_data(db.upcast());
+    // Enum variants use their parent enum's resolver and generics.
+    let (resolver, def): (_, GenericDefId) = match variant_id {
+        VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
+        VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
+        VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
+    };
+    let generics = generics(db.upcast(), def);
+    let mut res = ArenaMap::default();
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    for (field_id, field_data) in var_data.fields().iter() {
+        res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)))
+    }
+    Arc::new(res)
+}
+
+/// This query exists only to be used when resolving short-hand associated types
+/// like `T::Item`.
+///
+/// See the analogous query in rustc and its comment:
+/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
+/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
+/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
+/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
+pub(crate) fn generic_predicates_for_param_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    param_id: TypeOrConstParamId,
+    assoc_name: Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let generics = generics(db.upcast(), def);
+    let mut predicates: Vec<_> = resolver
+        .where_predicates_in_scope()
+        // we have to filter out all other predicates *first*, before attempting to lower them
+        .filter(|pred| match pred {
+            WherePredicate::ForLifetime { target, bound, .. }
+            | WherePredicate::TypeBound { target, bound, .. } => {
+                // Keep only predicates whose self type is exactly `param_id`
+                // (checked without fully lowering, to avoid cycles).
+                match target {
+                    WherePredicateTypeTarget::TypeRef(type_ref) => {
+                        if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
+                            return false;
+                        }
+                    }
+                    &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+                        let target_id = TypeOrConstParamId { parent: def, local_id };
+                        if target_id != param_id {
+                            return false;
+                        }
+                    }
+                };
+
+                match &**bound {
+                    TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
+                        // Only lower the bound if the trait could possibly define the associated
+                        // type we're looking for.
+
+                        let assoc_name = match &assoc_name {
+                            Some(it) => it,
+                            None => return true,
+                        };
+                        let tr = match resolver
+                            .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+                        {
+                            Some(TypeNs::TraitId(tr)) => tr,
+                            _ => return false,
+                        };
+
+                        all_super_traits(db.upcast(), tr).iter().any(|tr| {
+                            db.trait_data(*tr).items.iter().any(|(name, item)| {
+                                matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
+                            })
+                        })
+                    }
+                    TypeBound::Lifetime(_) | TypeBound::Error => false,
+                }
+            }
+            WherePredicate::Lifetime { .. } => false,
+        })
+        .flat_map(|pred| {
+            // `ignore_bindings = true`: assoc-type bindings could reintroduce
+            // the cycles this query exists to avoid.
+            ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
+        })
+        .collect();
+
+    // Also add the implicit `Sized` bounds for params not opted out via `?Sized`.
+    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+    let implicitly_sized_predicates =
+        implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
+            .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+    predicates.extend(implicitly_sized_predicates);
+    predicates.into()
+}
+
+/// Salsa cycle-recovery for `generic_predicates_for_param_query`: on a query
+/// cycle, pretend the parameter has no predicates instead of panicking.
+pub(crate) fn generic_predicates_for_param_recover(
+    _db: &dyn HirDatabase,
+    _cycle: &[String],
+    _def: &GenericDefId,
+    _param_id: &TypeOrConstParamId,
+    _assoc_name: &Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+    Arc::new([])
+}
+
+/// Salsa query: builds the `TraitEnvironment` for an item — the Chalk
+/// environment clauses derived from all where-predicates in scope, plus
+/// implicit `Sized` clauses and, inside a trait, `Self: Trait`.
+pub(crate) fn trait_environment_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+) -> Arc<TraitEnvironment> {
+    let resolver = def.resolver(db.upcast());
+    // Placeholder mode: the environment is expressed in terms of the item's
+    // own params, not bound variables.
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Placeholder);
+    let mut traits_in_scope = Vec::new();
+    let mut clauses = Vec::new();
+    for pred in resolver.where_predicates_in_scope() {
+        for pred in ctx.lower_where_predicate(pred, false) {
+            if let WhereClause::Implemented(tr) = &pred.skip_binders() {
+                traits_in_scope.push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
+            }
+            let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+            clauses.push(program_clause.into_from_env_clause(Interner));
+        }
+    }
+
+    let container: Option<ItemContainerId> = match def {
+        // FIXME: is there a function for this?
+        GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
+        GenericDefId::AdtId(_) => None,
+        GenericDefId::TraitId(_) => None,
+        GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
+        GenericDefId::ImplId(_) => None,
+        GenericDefId::EnumVariantId(_) => None,
+        GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
+    };
+    if let Some(ItemContainerId::TraitId(trait_id)) = container {
+        // add `Self: Trait<T1, T2, ...>` to the environment in trait
+        // function default implementations (and speculative code
+        // inside consts or type aliases)
+        cov_mark::hit!(trait_self_implements_self);
+        let substs = TyBuilder::placeholder_subst(db, trait_id);
+        let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
+        let pred = WhereClause::Implemented(trait_ref);
+        let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+        clauses.push(program_clause.into_from_env_clause(Interner));
+    }
+
+    // Implicit `Sized` for all params without an explicit `?Sized` opt-out.
+    let subst = generics(db.upcast(), def).placeholder_subst(db);
+    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+    let implicitly_sized_clauses =
+        implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
+            let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+            program_clause.into_from_env_clause(Interner)
+        });
+    clauses.extend(implicitly_sized_clauses);
+
+    let krate = def.module(db.upcast()).krate();
+
+    let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
+
+    Arc::new(TraitEnvironment { krate, traits_from_clauses: traits_in_scope, env })
+}
+
+/// Resolve the where clause(s) of an item with generics.
+/// Salsa query: lowers every where-predicate in scope (in bound-variable
+/// mode) and appends implicit `Sized` predicates for non-`?Sized` params.
+pub(crate) fn generic_predicates_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let generics = generics(db.upcast(), def);
+
+    let mut predicates = resolver
+        .where_predicates_in_scope()
+        .flat_map(|pred| {
+            ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p))
+        })
+        .collect::<Vec<_>>();
+
+    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+    let implicitly_sized_predicates =
+        implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+            .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+    predicates.extend(implicitly_sized_predicates);
+    predicates.into()
+}
+
+/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
+/// Exception is Self of a trait def.
+fn implicitly_sized_clauses<'a>(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+    explicitly_unsized_tys: &'a FxHashSet<Ty>,
+    substitution: &'a Substitution,
+    resolver: &Resolver,
+) -> impl Iterator<Item = WhereClause> + 'a {
+    // For traits, slot 0 of the substitution is `Self` — skip it.
+    let is_trait_def = matches!(def, GenericDefId::TraitId(..));
+    let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
+    let sized_trait = db
+        .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+        .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+
+    // If the `sized` lang item is missing, emit no clauses at all.
+    sized_trait.into_iter().flat_map(move |sized_trait| {
+        let implicitly_sized_tys = generic_args
+            .iter()
+            .filter_map(|generic_arg| generic_arg.ty(Interner))
+            .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
+        implicitly_sized_tys.map(move |self_ty| {
+            WhereClause::Implemented(TraitRef {
+                trait_id: sized_trait,
+                substitution: Substitution::from1(Interner, self_ty.clone()),
+            })
+        })
+    })
+}
+
+/// Resolve the default type params from generics
+/// Salsa query: one `Binders<GenericArg>` per parameter, in declaration
+/// order. Const params currently get an unknown-const placeholder.
+pub(crate) fn generic_defaults_query(
+    db: &dyn HirDatabase,
+    def: GenericDefId,
+) -> Arc<[Binders<chalk_ir::GenericArg<Interner>>]> {
+    let resolver = def.resolver(db.upcast());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    let generic_params = generics(db.upcast(), def);
+
+    let defaults = generic_params
+        .iter()
+        .enumerate()
+        .map(|(idx, (id, p))| {
+            let p = match p {
+                TypeOrConstParamData::TypeParamData(p) => p,
+                TypeOrConstParamData::ConstParamData(_) => {
+                    // FIXME: implement const generic defaults
+                    let val = unknown_const_as_generic(
+                        db.const_param_ty(ConstParamId::from_unchecked(id)),
+                    );
+                    return crate::make_binders_with_count(db, idx, &generic_params, val);
+                }
+            };
+            // A param with no written default gets `Error` as its default.
+            let mut ty =
+                p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+
+            // Each default can only refer to previous parameters.
+            // type variable default referring to parameter coming
+            // after it. This is forbidden (FIXME: report
+            // diagnostic)
+            ty = fallback_bound_vars(ty, idx);
+            let val = GenericArgData::Ty(ty).intern(Interner);
+            crate::make_binders_with_count(db, idx, &generic_params, val)
+        })
+        .collect();
+
+    defaults
+}
+
+/// Cycle recovery for `generic_defaults_query`: still produces one
+/// error/unknown default per parameter so downstream consumers can index by
+/// parameter position.
+pub(crate) fn generic_defaults_recover(
+ db: &dyn HirDatabase,
+ _cycle: &[String],
+ def: &GenericDefId,
+) -> Arc<[Binders<crate::GenericArg>]> {
+ let generic_params = generics(db.upcast(), *def);
+ // FIXME: this code is not covered in tests.
+ // we still need one default per parameter
+ let defaults = generic_params
+ .iter_id()
+ .enumerate()
+ .map(|(count, id)| {
+ let val = match id {
+ // Left = type parameter, Right = const parameter.
+ itertools::Either::Left(_) => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ };
+ crate::make_binders_with_count(db, count, &generic_params, val)
+ })
+ .collect();
+
+ defaults
+}
+
+/// Lowers the declared signature of a function.
+///
+/// Parameters and return type use separate lowering contexts because
+/// `impl Trait` lowers differently in each position: `Variable` mode for
+/// argument-position impl trait, `Opaque` mode for return-position.
+fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_params = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let params = data.params.iter().map(|(_, tr)| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let ret = ctx_ret.lower_ty(&data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let sig = CallableSig::from_params_and_return(params, ret, data.is_varargs());
+ make_binders(db, &generics, sig)
+}
+
+/// Returns the declared type of a function: the `FnDef` type applied to the
+/// function's own generic parameters as bound variables. This should not need
+/// to look at the function body.
+fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
+ let generic_params = generics(db.upcast(), def.into());
+ let args = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let fn_def = CallableDefId::FunctionId(def).to_chalk(db);
+ let ty = TyKind::FnDef(fn_def, args).intern(Interner);
+ make_binders(db, &generic_params, ty)
+}
+
+/// Build the declared type of a const.
+fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
+ let data = db.const_data(def);
+ let generics = generics(db.upcast(), def.into());
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+
+ // The const's annotated type, wrapped in its (parent's) binders.
+ make_binders(db, &generics, ctx.lower_ty(&data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
+ let data = db.static_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx = TyLoweringContext::new(db, &resolver);
+
+ // Statics are never generic, hence the empty binders.
+ Binders::empty(Interner, ctx.lower_ty(&data.type_ref))
+}
+
+/// Builds the signature of a tuple-struct constructor:
+/// `(field types...) -> StructTy`, under the struct's binders.
+fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
+ let struct_data = db.struct_data(def);
+ let fields = struct_data.variant_data.fields();
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ // Reuse the ADT type (and its binders) as the return type.
+ let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
+ Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple struct constructor.
+fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
+ let struct_data = db.struct_data(def);
+ // Unit structs have no constructor function; their value *is* the ADT type.
+ if let StructKind::Unit = struct_data.variant_data.kind() {
+ return type_for_adt(db, def.into());
+ }
+ let generics = generics(db.upcast(), def.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::StructId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+/// Builds the signature of a tuple enum-variant constructor:
+/// `(field types...) -> EnumTy`, under the parent enum's binders.
+fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id];
+ let fields = var_data.variant_data.fields();
+ // Generics live on the parent enum, so resolve relative to it.
+ let resolver = def.parent.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
+ Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple enum variant constructor.
+fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id].variant_data;
+ // Unit variants have no constructor function; their value is the enum type.
+ if let StructKind::Unit = var_data.kind() {
+ return type_for_adt(db, def.parent.into());
+ }
+ let generics = generics(db.upcast(), def.parent.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::EnumVariantId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+/// Builds the declared type of an ADT (struct/enum/union): the ADT applied to
+/// its own generic parameters as bound variables.
+fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+ let generic_params = generics(db.upcast(), adt.into());
+ let self_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generic_params,
+ TyKind::Adt(crate::AdtId(adt), self_subst).intern(Interner),
+ )
+}
+
+/// Build the declared type of a type alias.
+///
+/// An `extern` type alias lowers to an opaque foreign type with no binders;
+/// otherwise the aliased type is lowered (falling back to an error type when
+/// the alias has no `= <ty>` part).
+fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), t.into());
+ let resolver = t.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ // Run the `type_alias_data` query once instead of twice.
+ let type_alias_data = db.type_alias_data(t);
+ if type_alias_data.is_extern {
+ Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
+ } else {
+ let type_ref = &type_alias_data.type_ref;
+ let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
+ make_binders(db, &generics, inner)
+ }
+}
+
+/// Anything callable in the value namespace: a function, or a tuple
+/// struct/enum-variant constructor.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum CallableDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ EnumVariantId(EnumVariantId),
+}
+impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
+
+impl CallableDefId {
+ /// Returns the crate that contains this callable's definition.
+ pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
+ let db = db.upcast();
+ match self {
+ CallableDefId::FunctionId(f) => f.lookup(db).module(db),
+ CallableDefId::StructId(s) => s.lookup(db).container,
+ CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container,
+ }
+ .krate()
+ }
+}
+
+// Every callable def is also a generic def, so the conversion is total.
+impl From<CallableDefId> for GenericDefId {
+ fn from(def: CallableDefId) -> GenericDefId {
+ match def {
+ CallableDefId::FunctionId(f) => f.into(),
+ CallableDefId::StructId(s) => s.into(),
+ CallableDefId::EnumVariantId(e) => e.into(),
+ }
+ }
+}
+
+/// Definitions that introduce a type in the *type* namespace (see `ty_query`).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum TyDefId {
+ BuiltinType(BuiltinType),
+ AdtId(AdtId),
+ TypeAliasId(TypeAliasId),
+}
+impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
+
+/// Definitions that introduce a value in the *value* namespace
+/// (see `value_ty_query`).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ValueTyDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ UnionId(UnionId),
+ EnumVariantId(EnumVariantId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+}
+impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
+
+/// Build the declared type of an item. This depends on the namespace; e.g. for
+/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
+/// the constructor function `(usize) -> Foo` which lives in the values
+/// namespace.
+pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
+ match def {
+ // Builtins are never generic, hence the empty binders.
+ TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
+ TyDefId::AdtId(it) => type_for_adt(db, it),
+ TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
+ }
+}
+
+/// Cycle recovery for `ty_query`: returns an error type under the def's
+/// binders (empty binders for builtins, which cannot be generic).
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+ let generics = match *def {
+ TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
+ TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
+ TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
+ };
+ make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+/// Build the declared type of an item in the *value* namespace
+/// (the counterpart of `ty_query` for values).
+pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
+ match def {
+ ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
+ ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+ // Unions have no constructor function; their value type is the ADT type.
+ ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
+ ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
+ ValueTyDefId::ConstId(it) => type_for_const(db, it),
+ ValueTyDefId::StaticId(it) => type_for_static(db, it),
+ }
+}
+
+/// Lowers the self type of an `impl` block (the `Foo` in `impl Trait for Foo`).
+pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
+ let impl_loc = impl_id.lookup(db.upcast());
+ let impl_data = db.impl_data(impl_id);
+ let resolver = impl_id.resolver(db.upcast());
+ // Attach context so a panic during lowering reports which impl it was in.
+ let _cx = stdx::panic_context::enter(format!(
+ "impl_self_ty_query({:?} -> {:?} -> {:?})",
+ impl_id, impl_loc, impl_data
+ ));
+ let generics = generics(db.upcast(), impl_id.into());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty))
+}
+
+// Lowers the annotated type of a const generic parameter. If `def`
+// unexpectedly refers to a type parameter, this asserts via `never!` and
+// falls back to an error type instead of returning `None`.
+pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
+ let parent_data = db.generic_params(def.parent());
+ let data = &parent_data.type_or_consts[def.local_id()];
+ let resolver = def.parent().resolver(db.upcast());
+ let ctx = TyLoweringContext::new(db, &resolver);
+ match data {
+ TypeOrConstParamData::TypeParamData(_) => {
+ never!();
+ Ty::new(Interner, TyKind::Error)
+ }
+ TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(&d.ty),
+ }
+}
+
+/// Cycle recovery for `impl_self_ty_query`: error type under the impl's binders.
+pub(crate) fn impl_self_ty_recover(
+ db: &dyn HirDatabase,
+ _cycle: &[String],
+ impl_id: &ImplId,
+) -> Binders<Ty> {
+ let generics = generics(db.upcast(), (*impl_id).into());
+ make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+/// Lowers the trait ref an impl implements. Returns `None` for inherent impls
+/// (no `target_trait`) or when the trait ref fails to lower.
+pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
+ let impl_loc = impl_id.lookup(db.upcast());
+ let impl_data = db.impl_data(impl_id);
+ let resolver = impl_id.resolver(db.upcast());
+ // Attach context so a panic during lowering reports which impl it was in.
+ let _cx = stdx::panic_context::enter(format!(
+ "impl_trait_query({:?} -> {:?} -> {:?})",
+ impl_id, impl_loc, impl_data
+ ));
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ // The impl's self type becomes `Self` in the lowered trait ref; both share
+ // the same binders.
+ let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
+ let target_trait = impl_data.target_trait.as_ref()?;
+ Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
+}
+
+/// Collects the `impl Trait` types appearing in the return type of `def`.
+///
+/// Returns `None` when the return type contains no `impl Trait` at all, so
+/// callers can skip the common case cheaply.
+pub(crate) fn return_type_impl_traits(
+ db: &dyn HirDatabase,
+ def: hir_def::FunctionId,
+) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
+ // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ // Lowering is done only for its side effect of populating
+ // `ctx_ret.opaque_type_data`; the resulting type is discarded.
+ let _ret = ctx_ret.lower_ty(&data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let return_type_impl_traits =
+ ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
+ if return_type_impl_traits.impl_traits.is_empty() {
+ None
+ } else {
+ Some(Arc::new(make_binders(db, &generics, return_type_impl_traits)))
+ }
+}
+
+/// Maps the HIR mutability marker onto Chalk's `Mutability`.
+pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
+ if matches!(m, hir_def::type_ref::Mutability::Mut) {
+ Mutability::Mut
+ } else {
+ Mutability::Not
+ }
+}
+
+/// Checks if the provided generic arg matches its expected kind, then lower them via
+/// provided closures. Use unknown if there was kind mismatch.
+///
+/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
+pub(crate) fn generic_arg_to_chalk<'a, T>(
+ db: &dyn HirDatabase,
+ kind_id: Either<TypeParamId, ConstParamId>,
+ arg: &'a GenericArg,
+ this: &mut T,
+ for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
+ for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
+) -> Option<crate::GenericArg> {
+ // The expected kind comes from the parameter: Left = type, Right = const.
+ let kind = match kind_id {
+ Either::Left(_) => ParamKind::Type,
+ Either::Right(id) => {
+ let ty = db.const_param_ty(id);
+ ParamKind::Const(ty)
+ }
+ };
+ Some(match (arg, kind) {
+ (GenericArg::Type(type_ref), ParamKind::Type) => {
+ let ty = for_type(this, type_ref);
+ GenericArgData::Ty(ty).intern(Interner)
+ }
+ (GenericArg::Const(c), ParamKind::Const(c_ty)) => {
+ GenericArgData::Const(for_const(this, c, c_ty)).intern(Interner)
+ }
+ // Kind mismatch: a const where a type is expected lowers to an error type.
+ (GenericArg::Const(_), ParamKind::Type) => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
+ // We want to recover simple idents, which parser detects them
+ // as types. Maybe here is not the best place to do it, but
+ // it works.
+ if let TypeRef::Path(p) = t {
+ let p = p.mod_path();
+ if p.kind == PathKind::Plain {
+ if let [n] = p.segments() {
+ let c = ConstScalarOrPath::Path(n.clone());
+ return Some(
+ GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
+ );
+ }
+ }
+ }
+ // Anything more complex than a bare ident is an unknown const.
+ unknown_const_as_generic(c_ty)
+ }
+ (GenericArg::Lifetime(_), _) => return None,
+ })
+}
+
+/// Lowers a const argument that is either a literal scalar or a path (e.g. a
+/// reference to a const generic parameter) into a Chalk `Const`.
+pub(crate) fn const_or_path_to_chalk(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ expected_ty: Ty,
+ value: &ConstScalarOrPath,
+ mode: ParamLoweringMode,
+ args: impl FnOnce() -> Generics,
+ debruijn: DebruijnIndex,
+) -> Const {
+ match value {
+ ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty),
+ ConstScalarOrPath::Path(n) => {
+ let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
+ // Unresolvable paths fall back to an unknown const of the expected type.
+ path_to_const(db, resolver, &path, mode, args, debruijn)
+ .unwrap_or_else(|| unknown_const(expected_ty))
+ }
+ }
+}
+
+/// This replaces any 'free' `Bound` vars in `s` (i.e. those with indices at or
+/// past `num_vars_to_keep`) by `TyKind::Error` for types, or an unknown const
+/// for consts. Only variables under the innermost binder are affected.
+fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+ s: T,
+ num_vars_to_keep: usize,
+) -> T {
+ crate::fold_free_vars(
+ s,
+ |bound, binders| {
+ if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+ TyKind::Error.intern(Interner)
+ } else {
+ // Variable is kept; re-shift it for the binder depth it sits at.
+ bound.shifted_in_from(binders).to_ty(Interner)
+ }
+ },
+ |ty, bound, binders| {
+ if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+ unknown_const(ty.clone())
+ } else {
+ bound.shifted_in_from(binders).to_const(Interner, ty)
+ }
+ },
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
new file mode 100644
index 000000000..d765fee0e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
@@ -0,0 +1,148 @@
+//! This module contains the implementations of the `ToChalk` trait, which
+//! handles conversion between our data types and their corresponding types in
+//! Chalk (in both directions); plus some helper functions for more specialized
+//! conversions.
+
+use chalk_solve::rust_ir;
+
+use base_db::salsa::{self, InternKey};
+use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
+
+use crate::{
+ chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId,
+ Interner, OpaqueTyId, PlaceholderIndex,
+};
+
+/// Bidirectional conversion between a hir-ty id and its Chalk counterpart.
+pub(crate) trait ToChalk {
+ type Chalk;
+ fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
+ fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
+}
+
+/// Free-function form of `ToChalk::from_chalk`, letting the target type be
+/// chosen by inference at the call site.
+pub(crate) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
+where
+ T: ToChalk<Chalk = ChalkT>,
+{
+ T::from_chalk(db, chalk)
+}
+
+// Impl ids convert by transplanting the salsa intern id directly; no database
+// lookup is needed in either direction.
+impl ToChalk for hir_def::ImplId {
+ type Chalk = chalk_db::ImplId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId {
+ chalk_ir::ImplId(self.as_intern_id())
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId {
+ InternKey::from_intern_id(impl_id.0)
+ }
+}
+
+// Callable defs are interned through the database, so both directions go via
+// the `intern_callable_def` / `lookup_intern_callable_def` queries.
+impl ToChalk for CallableDefId {
+ type Chalk = FnDefId;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
+ db.intern_callable_def(self).into()
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
+ db.lookup_intern_callable_def(fn_def_id.into())
+ }
+}
+
+/// Newtype marking a type alias used as an associated-type *value* (the
+/// `type Foo = ...;` inside an impl), which has its own Chalk id space.
+pub(crate) struct TypeAliasAsValue(pub(crate) TypeAliasId);
+
+impl ToChalk for TypeAliasAsValue {
+ type Chalk = chalk_db::AssociatedTyValueId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
+ rust_ir::AssociatedTyValueId(self.0.as_intern_id())
+ }
+
+ fn from_chalk(
+ _db: &dyn HirDatabase,
+ assoc_ty_value_id: chalk_db::AssociatedTyValueId,
+ ) -> TypeAliasAsValue {
+ TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
+ }
+}
+
+// `FnDefId` and `InternedCallableDefId` wrap the same intern id, so the
+// conversions are lossless in both directions.
+impl From<FnDefId> for crate::db::InternedCallableDefId {
+ fn from(fn_def_id: FnDefId) -> Self {
+ InternKey::from_intern_id(fn_def_id.0)
+ }
+}
+
+impl From<crate::db::InternedCallableDefId> for FnDefId {
+ fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
+ chalk_ir::FnDefId(callable_def_id.as_intern_id())
+ }
+}
+
+// Same intern-id transplant for opaque (`impl Trait`) type ids.
+impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
+ fn from(id: OpaqueTyId) -> Self {
+ InternKey::from_intern_id(id.0)
+ }
+}
+
+impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
+ fn from(id: crate::db::InternedOpaqueTyId) -> Self {
+ chalk_ir::OpaqueTyId(id.as_intern_id())
+ }
+}
+
+// Same intern-id transplant for closure ids.
+impl From<chalk_ir::ClosureId<Interner>> for crate::db::InternedClosureId {
+ fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
+ Self::from_intern_id(id.0)
+ }
+}
+
+impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
+ fn from(id: crate::db::InternedClosureId) -> Self {
+ chalk_ir::ClosureId(id.as_intern_id())
+ }
+}
+
+/// Converts a type alias id to Chalk's foreign-type id (used for `extern` types).
+pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
+ chalk_ir::ForeignDefId(salsa::InternKey::as_intern_id(&id))
+}
+
+/// Inverse of [`to_foreign_def_id`].
+pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId {
+ salsa::InternKey::from_intern_id(id.0)
+}
+
+/// Converts a type alias id to Chalk's associated-type id.
+pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId {
+ chalk_ir::AssocTypeId(salsa::InternKey::as_intern_id(&id))
+}
+
+/// Inverse of [`to_assoc_type_id`].
+pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
+ salsa::InternKey::from_intern_id(id.0)
+}
+
+/// Maps a Chalk placeholder index back to the generic parameter it stands for.
+/// Only the root universe is ever used here, hence the assertion.
+pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
+ assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
+ let interned_id = salsa::InternKey::from_intern_id(salsa::InternId::from(idx.idx));
+ db.lookup_intern_type_or_const_param_id(interned_id)
+}
+
+/// Inverse of [`from_placeholder_idx`]: interns the parameter id and places it
+/// in the root universe.
+pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> PlaceholderIndex {
+ let interned_id = db.intern_type_or_const_param_id(id);
+ PlaceholderIndex {
+ ui: chalk_ir::UniverseIndex::ROOT,
+ idx: salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+ }
+}
+
+/// Maps a Chalk placeholder index back to a lifetime parameter
+/// (the lifetime analogue of [`from_placeholder_idx`]).
+pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
+ assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
+ let interned_id = salsa::InternKey::from_intern_id(salsa::InternId::from(idx.idx));
+ db.lookup_intern_lifetime_param_id(interned_id)
+}
+
+/// Converts a hir-def trait id to the Chalk trait id (intern-id transplant).
+pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId {
+ chalk_ir::TraitId(salsa::InternKey::as_intern_id(&id))
+}
+
+/// Inverse of [`to_chalk_trait_id`].
+pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId {
+ salsa::InternKey::from_intern_id(id.0)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
new file mode 100644
index 000000000..15df7b3dd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -0,0 +1,1186 @@
+//! This module is concerned with finding methods that a given type provides.
+//! For details about how this works in rustc, see the method lookup page in the
+//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
+//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateId, Edition};
+use chalk_ir::{cast::Cast, Mutability, UniverseIndex};
+use hir_def::{
+ data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ TraitId,
+};
+use hir_expand::name::Name;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
+
+use crate::{
+ autoderef::{self, AutoderefKind},
+ db::HirDatabase,
+ from_foreign_def_id,
+ infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
+ primitive::{FloatTy, IntTy, UintTy},
+ static_lifetime,
+ utils::all_super_traits,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
+ Scalar, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+};
+
+/// This is used as a key for indexing impls.
+///
+/// A fingerprint is a coarse approximation of a self type: two types with
+/// different fingerprints can never be matched by the same impl.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TyFingerprint {
+ // These are lang item impls:
+ Str,
+ Slice,
+ Array,
+ Never,
+ RawPtr(Mutability),
+ Scalar(Scalar),
+ // These can have user-defined impls:
+ Adt(hir_def::AdtId),
+ Dyn(TraitId),
+ ForeignType(ForeignDefId),
+ // These only exist for trait impls
+ Unit,
+ Unnameable,
+ Function(u32),
+}
+
+impl TyFingerprint {
+ /// Creates a TyFingerprint for looking up an inherent impl. Only certain
+ /// types can have inherent impls: if we have some `struct S`, we can have
+ /// an `impl S`, but not `impl &S`. Hence, this will return `None` for
+ /// reference types and such.
+ pub fn for_inherent_impl(ty: &Ty) -> Option<TyFingerprint> {
+ let fp = match ty.kind(Interner) {
+ TyKind::Str => TyFingerprint::Str,
+ TyKind::Never => TyFingerprint::Never,
+ TyKind::Slice(..) => TyFingerprint::Slice,
+ TyKind::Array(..) => TyFingerprint::Array,
+ TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+ TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+ TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+ TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+ TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+ _ => return None,
+ };
+ Some(fp)
+ }
+
+ /// Creates a TyFingerprint for looking up a trait impl.
+ ///
+ /// Unlike `for_inherent_impl`, this covers more type kinds: references and
+ /// tuples recurse into their (first) element, and otherwise-unnameable
+ /// types get a catch-all fingerprint.
+ pub fn for_trait_impl(ty: &Ty) -> Option<TyFingerprint> {
+ let fp = match ty.kind(Interner) {
+ TyKind::Str => TyFingerprint::Str,
+ TyKind::Never => TyFingerprint::Never,
+ TyKind::Slice(..) => TyFingerprint::Slice,
+ TyKind::Array(..) => TyFingerprint::Array,
+ TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+ TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+ TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+ TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+ TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+ // A reference fingerprints as its pointee.
+ TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
+ // A tuple fingerprints as its first element; the empty tuple gets
+ // its own fingerprint.
+ TyKind::Tuple(_, subst) => {
+ let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(Interner));
+ match first_ty {
+ Some(ty) => return TyFingerprint::for_trait_impl(ty),
+ None => TyFingerprint::Unit,
+ }
+ }
+ TyKind::AssociatedType(_, _)
+ | TyKind::OpaqueType(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Closure(_, _)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => TyFingerprint::Unnameable,
+ // Function pointers are distinguished by their arity only.
+ TyKind::Function(fn_ptr) => {
+ TyFingerprint::Function(fn_ptr.substitution.0.len(Interner) as u32)
+ }
+ TyKind::Alias(_)
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Error => return None,
+ };
+ Some(fp)
+ }
+}
+
+/// Fingerprints of every integer scalar type (signed and unsigned).
+pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I8)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I16)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I32)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I64)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I128)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::Isize)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U8)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U16)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U32)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U64)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U128)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)),
+];
+
+/// Fingerprints of every floating-point scalar type.
+pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
+ TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)),
+ TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)),
+];
+
+/// Trait impls defined or available in some crate.
+///
+/// Indexed first by implemented trait, then by the fingerprint of the self
+/// type, so lookups can narrow to candidate impls quickly.
+#[derive(Debug, Eq, PartialEq)]
+pub struct TraitImpls {
+ // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
+ map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
+}
+
+impl TraitImpls {
+ /// Collects every trait impl defined in `krate`.
+ pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ /// Collects trait impls defined inside a block expression; `None` if the
+ /// block has no def map.
+ pub(crate) fn trait_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ let _p = profile::span("trait_impls_in_block_query");
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let block_def_map = db.block_def_map(block)?;
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+
+ Some(Arc::new(impls))
+ }
+
+ /// Merges the trait impls of `krate` and all its transitive dependencies
+ /// into one map.
+ pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
+ let crate_graph = db.crate_graph();
+ let mut res = Self { map: FxHashMap::default() };
+
+ for krate in crate_graph.transitive_deps(krate) {
+ res.merge(&db.trait_impls_in_crate(krate));
+ }
+ res.shrink_to_fit();
+
+ Arc::new(res)
+ }
+
+ // Trim excess capacity at every level of the nested maps; the result is
+ // immutable once built and cached by salsa.
+ fn shrink_to_fit(&mut self) {
+ self.map.shrink_to_fit();
+ self.map.values_mut().for_each(|map| {
+ map.shrink_to_fit();
+ map.values_mut().for_each(Vec::shrink_to_fit);
+ });
+ }
+
+ // Walks all modules of `def_map`, indexing every trait impl by
+ // (trait, self-type fingerprint). Recurses into block def maps of
+ // unnamed consts.
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let target_trait = match db.impl_trait(impl_id) {
+ Some(tr) => tr.skip_binders().hir_trait_id(),
+ // Inherent impl (or unresolvable trait ref): not indexed here.
+ None => continue,
+ };
+ let self_ty = db.impl_self_ty(impl_id);
+ let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.skip_binders());
+ self.map
+ .entry(target_trait)
+ .or_default()
+ .entry(self_ty_fp)
+ .or_default()
+ .push(impl_id);
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ // Appends all entries of `other` into `self` (duplicates are not deduped).
+ fn merge(&mut self, other: &Self) {
+ for (trait_, other_map) in &other.map {
+ let map = self.map.entry(*trait_).or_default();
+ for (fp, impls) in other_map {
+ map.entry(*fp).or_default().extend(impls);
+ }
+ }
+ }
+
+ /// Queries all trait impls for the given type.
+ pub fn for_self_ty_without_blanket_impls(
+ &self,
+ fp: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .values()
+ .flat_map(move |impls| impls.get(&Some(fp)).into_iter())
+ .flat_map(|it| it.iter().copied())
+ }
+
+ /// Queries all impls of the given trait.
+ pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+
+ /// Queries all impls of `trait_` that may apply to `self_ty`.
+ pub fn for_trait_and_self_ty(
+ &self,
+ trait_: TraitId,
+ self_ty: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ // `None`-keyed impls may apply to any self type, so include them.
+ .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None)))
+ .flat_map(|v| v.iter().copied())
+ }
+
+ /// Iterates over every trait impl in the map.
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+}
+
+/// Inherent impls defined in some crate.
+///
+/// Inherent impls can only be defined in the crate that also defines the self type of the impl
+/// (note that some primitives are considered to be defined by both libcore and liballoc).
+///
+/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
+/// single crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct InherentImpls {
+ // Keyed by the fingerprint of the impl's self type.
+ map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+}
+
+impl InherentImpls {
+ /// Collects every inherent impl defined in `krate`.
+ pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ /// Collects inherent impls defined inside a block expression; `None` if
+ /// the block has no def map.
+ pub(crate) fn inherent_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ // Early-return with `?`, mirroring `TraitImpls::trait_impls_in_block_query`.
+ let block_def_map = db.block_def_map(block)?;
+ let mut impls = Self { map: FxHashMap::default() };
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+ Some(Arc::new(impls))
+ }
+
+ // Trim excess capacity; the map is immutable once built and cached by salsa.
+ fn shrink_to_fit(&mut self) {
+ self.map.values_mut().for_each(Vec::shrink_to_fit);
+ self.map.shrink_to_fit();
+ }
+
+ // Walks all modules of `def_map`, indexing inherent (non-trait) impls by
+ // the fingerprint of their self type. Recurses into block def maps of
+ // unnamed consts.
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let data = db.impl_data(impl_id);
+ if data.target_trait.is_some() {
+ // Trait impls are indexed by `TraitImpls`, not here.
+ continue;
+ }
+
+ let self_ty = db.impl_self_ty(impl_id);
+ let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
+ if let Some(fp) = fp {
+ self.map.entry(fp).or_default().push(impl_id);
+ }
+ // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ /// All inherent impls whose self type matches `self_ty`'s fingerprint;
+ /// empty for types that cannot have inherent impls.
+ pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
+ match TyFingerprint::for_inherent_impl(self_ty) {
+ Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
+ None => &[],
+ }
+ }
+
+ /// Iterates over every inherent impl in the map.
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|v| v.iter().copied())
+ }
+}
+
+pub fn inherent_impl_crates_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ fp: TyFingerprint,
+) -> ArrayVec<CrateId, 2> {
+ let _p = profile::span("inherent_impl_crates_query");
+ let mut res = ArrayVec::new();
+ let crate_graph = db.crate_graph();
+
+ for krate in crate_graph.transitive_deps(krate) {
+ if res.is_full() {
+ // we don't currently look for or store more than two crates here,
+ // so don't needlessly look at more crates than necessary.
+ break;
+ }
+ let impls = db.inherent_impls_in_crate(krate);
+ if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) {
+ res.push(krate);
+ }
+ }
+
+ res
+}
+
+fn collect_unnamed_consts<'a>(
+ db: &'a dyn HirDatabase,
+ scope: &'a ItemScope,
+) -> impl Iterator<Item = ConstId> + 'a {
+ let unnamed_consts = scope.unnamed_consts();
+
+ // FIXME: Also treat consts named `_DERIVE_*` as unnamed, since synstructure generates those.
+ // Should be removed once synstructure stops doing that.
+ let synstructure_hack_consts = scope.values().filter_map(|(item, _)| match item {
+ ModuleDefId::ConstId(id) => {
+ let loc = id.lookup(db.upcast());
+ let item_tree = loc.id.item_tree(db.upcast());
+ if item_tree[loc.id.value]
+ .name
+ .as_ref()
+ .map_or(false, |n| n.to_smol_str().starts_with("_DERIVE_"))
+ {
+ Some(id)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ });
+
+ unnamed_consts.chain(synstructure_hack_consts)
+}
+
+pub fn def_crates(
+ db: &dyn HirDatabase,
+ ty: &Ty,
+ cur_crate: CrateId,
+) -> Option<ArrayVec<CrateId, 2>> {
+ let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
+
+ let fp = TyFingerprint::for_inherent_impl(ty);
+
+ match ty.kind(Interner) {
+ TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())),
+ TyKind::Foreign(id) => {
+ mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast()))
+ }
+ TyKind::Dyn(_) => ty
+ .dyn_trait()
+ .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))),
+ // for primitives, there may be impls in various places (core and alloc
+ // mostly). We just check the whole crate graph for crates with impls
+ // (cached behind a query).
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Slice(_)
+ | TyKind::Array(..)
+ | TyKind::Raw(..) => {
+ Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive")))
+ }
+ _ => return None,
+ }
+}
+
+/// Look up the method with the given name.
+pub(crate) fn lookup_method(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: &Name,
+) -> Option<(ReceiverAdjustments, FunctionId)> {
+ iterate_method_candidates(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ Some(name),
+ LookupMode::MethodCall,
+ |adjustments, f| match f {
+ AssocItemId::FunctionId(f) => Some((adjustments, f)),
+ _ => None,
+ },
+ )
+}
+
/// Whether we're looking up a dotted method call (like `v.len()`) or a path
/// (like `Vec::new`). The mode decides whether autoderef/autoref are applied
/// and whether non-function associated items are candidates.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LookupMode {
    /// Looking up a method call like `v.len()`: We only consider candidates
    /// that have a `self` parameter, and do autoderef.
    MethodCall,
    /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
    /// candidates including associated constants, but don't do autoderef.
    Path,
}
+
/// Controls visibility filtering during candidate lookup, and whether impls
/// from a specific block scope are considered.
#[derive(Clone, Copy)]
pub enum VisibleFromModule {
    /// Filter for results that are visible from the given module.
    Filter(ModuleId),
    /// Include impls from the given block.
    IncludeBlock(BlockId),
    /// Do nothing special with regard to visibility.
    None,
}
+
+impl From<Option<ModuleId>> for VisibleFromModule {
+ fn from(module: Option<ModuleId>) -> Self {
+ match module {
+ Some(module) => Self::Filter(module),
+ None => Self::None,
+ }
+ }
+}
+
+impl From<Option<BlockId>> for VisibleFromModule {
+ fn from(block: Option<BlockId>) -> Self {
+ match block {
+ Some(block) => Self::IncludeBlock(block),
+ None => Self::None,
+ }
+ }
+}
+
/// Adjustments determined during method lookup that must be applied to the
/// receiver expression before the call (see [`ReceiverAdjustments::apply`]).
#[derive(Debug, Clone, Default)]
pub struct ReceiverAdjustments {
    // Final auto-ref step (`&` or `&mut`), applied after the derefs.
    autoref: Option<Mutability>,
    // Number of deref steps to apply first.
    autoderefs: usize,
    // Whether `[T; N]` is unsized to `[T]` as the last step before autoref.
    unsize_array: bool,
}
+
impl ReceiverAdjustments {
    /// Replays the recorded adjustments on `ty`, returning the adjusted type
    /// and the `Adjustment` steps taken. Order matters: derefs first, then
    /// optional array unsizing, then the optional autoref.
    pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec<Adjustment>) {
        let mut ty = ty;
        let mut adjust = Vec::new();
        for _ in 0..self.autoderefs {
            match autoderef::autoderef_step(table, ty.clone()) {
                None => {
                    // Lookup previously succeeded with this many derefs, so this
                    // shouldn't happen; degrade to an error type rather than panic.
                    never!("autoderef not possible for {:?}", ty);
                    ty = TyKind::Error.intern(Interner);
                    break;
                }
                Some((kind, new_ty)) => {
                    ty = new_ty.clone();
                    adjust.push(Adjustment {
                        kind: Adjust::Deref(match kind {
                            // FIXME should we know the mutability here?
                            AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
                            AutoderefKind::Builtin => None,
                        }),
                        target: new_ty,
                    });
                }
            }
        }
        if self.unsize_array {
            ty = match ty.kind(Interner) {
                TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner),
                _ => {
                    // `unsize_array` is only recorded for array receivers.
                    never!("unsize_array with non-array {:?}", ty);
                    ty
                }
            };
            // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference
            adjust.push(Adjustment {
                kind: Adjust::Pointer(PointerCast::Unsize),
                target: ty.clone(),
            });
        }
        if let Some(m) = self.autoref {
            ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
            adjust
                .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
        }
        (ty, adjust)
    }

    /// Returns a copy of `self` with the autoref step set to `m`.
    fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments {
        Self { autoref: Some(m), ..*self }
    }
}
+
+// This would be nicer if it just returned an iterator, but that runs into
+// lifetime problems, because we need to borrow temp `CrateImplDefs`.
+// FIXME add a context type here?
+pub(crate) fn iterate_method_candidates<T>(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mode: LookupMode,
+ mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option<T>,
+) -> Option<T> {
+ let mut slot = None;
+ iterate_method_candidates_dyn(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ mode,
+ &mut |adj, item| {
+ assert!(slot.is_none());
+ if let Some(it) = callback(adj, item) {
+ slot = Some(it);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+}
+
+pub fn lookup_impl_method(
+ self_ty: &Ty,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+ name: &Name,
+) -> Option<FunctionId> {
+ let self_ty_fp = TyFingerprint::for_trait_impl(self_ty)?;
+ let trait_impls = db.trait_impls_in_deps(env.krate);
+ let impls = trait_impls.for_trait_and_self_ty(trait_, self_ty_fp);
+ let mut table = InferenceTable::new(db, env.clone());
+ find_matching_impl(impls, &mut table, &self_ty).and_then(|data| {
+ data.items.iter().find_map(|it| match it {
+ AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
+ _ => None,
+ })
+ })
+}
+
/// Returns the first impl in `impls` whose self type unifies with `self_ty`
/// and whose where clauses aren't known to be unsatisfiable.
fn find_matching_impl(
    mut impls: impl Iterator<Item = ImplId>,
    table: &mut InferenceTable<'_>,
    self_ty: &Ty,
) -> Option<Arc<ImplData>> {
    let db = table.db;
    loop {
        let impl_ = impls.next()?;
        // Each candidate is checked in a snapshot so that failed unifications
        // don't leak inference constraints into the table.
        let r = table.run_in_snapshot(|table| {
            let impl_data = db.impl_data(impl_);
            // Instantiate the impl's generics with fresh inference variables.
            let substs =
                TyBuilder::subst_for_def(db, impl_).fill_with_inference_vars(table).build();
            let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);

            table
                .unify(self_ty, &impl_ty)
                .then(|| {
                    // Self type matched; additionally require the impl's where
                    // clauses to hold.
                    let wh_goals =
                        crate::chalk_db::convert_where_clauses(db, impl_.into(), &substs)
                            .into_iter()
                            .map(|b| b.cast(Interner));

                    let goal = crate::Goal::all(Interner, wh_goals);

                    table.try_obligation(goal).map(|_| impl_data)
                })
                .flatten()
        });
        if r.is_some() {
            break r;
        }
    }
}
+
/// Iterates associated-item candidates for a path like `Vec::new`. Path mode
/// does no autoderef, so the receiver adjustments are irrelevant and dropped.
pub fn iterate_path_candidates(
    ty: &Canonical<Ty>,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    iterate_method_candidates_dyn(
        ty,
        db,
        env,
        traits_in_scope,
        visible_from_module,
        name,
        LookupMode::Path,
        // the adjustments are not relevant for path lookup
        &mut |_, id| callback(id),
    )
}
+
/// Dyn-dispatched core of candidate iteration; dispatches on `mode`
/// (autoderef chain for method calls, a single self type for paths).
/// The order in which candidates are produced is part of the semantics —
/// earlier candidates win resolution.
pub fn iterate_method_candidates_dyn(
    ty: &Canonical<Ty>,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mode: LookupMode,
    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    match mode {
        LookupMode::MethodCall => {
            // For method calls, rust first does any number of autoderef, and
            // then one autoref (i.e. when the method takes &self or &mut self).
            // Note that when we've got a receiver like &S, even if the method
            // we find in the end takes &self, we still do the autoderef step
            // (just as rustc does an autoderef and then autoref again).

            // We have to be careful about the order we're looking at candidates
            // in here. Consider the case where we're resolving `x.clone()`
            // where `x: &Vec<_>`. This resolves to the clone method with self
            // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
            // the receiver type exactly matches before cases where we have to
            // do autoref. But in the autoderef steps, the `&_` self type comes
            // up *before* the `Vec<_>` self type.
            //
            // On the other hand, we don't want to just pick any by-value method
            // before any by-autoref method; it's just that we need to consider
            // the methods by autoderef order of *receiver types*, not *self
            // types*.

            let mut table = InferenceTable::new(db, env.clone());
            let ty = table.instantiate_canonical(ty.clone());
            let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);

            // Walk the deref chain outermost-first; each step carries the
            // adjustments needed to reach that receiver type.
            let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
                iterate_method_candidates_with_autoref(
                    &receiver_ty,
                    adj,
                    db,
                    env.clone(),
                    traits_in_scope,
                    visible_from_module,
                    name,
                    callback,
                )
            });
            result
        }
        LookupMode::Path => {
            // No autoderef for path lookups
            iterate_method_candidates_for_self_ty(
                ty,
                db,
                env,
                traits_in_scope,
                visible_from_module,
                name,
                callback,
            )
        }
    }
}
+
/// For one step of the deref chain, tries candidates for the receiver as-is,
/// then behind `&`, then behind `&mut` — in that order, mirroring rustc's
/// autoref behavior. The order is semantically significant.
fn iterate_method_candidates_with_autoref(
    receiver_ty: &Canonical<Ty>,
    first_adjustment: ReceiverAdjustments,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) {
        // don't try to resolve methods on unknown types
        return ControlFlow::Continue(());
    }

    // 1. By-value receiver.
    iterate_method_candidates_by_receiver(
        receiver_ty,
        first_adjustment.clone(),
        db,
        env.clone(),
        traits_in_scope,
        visible_from_module,
        name,
        &mut callback,
    )?;

    // 2. Shared-reference receiver (`&T`).
    let refed = Canonical {
        value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
            .intern(Interner),
        binders: receiver_ty.binders.clone(),
    };

    iterate_method_candidates_by_receiver(
        &refed,
        first_adjustment.with_autoref(Mutability::Not),
        db,
        env.clone(),
        traits_in_scope,
        visible_from_module,
        name,
        &mut callback,
    )?;

    // 3. Mutable-reference receiver (`&mut T`).
    let ref_muted = Canonical {
        value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
            .intern(Interner),
        binders: receiver_ty.binders.clone(),
    };

    iterate_method_candidates_by_receiver(
        &ref_muted,
        first_adjustment.with_autoref(Mutability::Mut),
        db,
        env,
        traits_in_scope,
        visible_from_module,
        name,
        &mut callback,
    )
}
+
/// Iterates candidates whose *receiver* type is exactly `receiver_ty`. The
/// method itself may be defined on any type in the deref chain of the
/// receiver; inherent methods are tried on the whole chain before trait
/// methods (this ordering is load-bearing for resolution).
fn iterate_method_candidates_by_receiver(
    receiver_ty: &Canonical<Ty>,
    receiver_adjustments: ReceiverAdjustments,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let mut table = InferenceTable::new(db, env);
    let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
    // Snapshot so the first autoderef pass's constraints can be rolled back
    // before the second pass.
    let snapshot = table.snapshot();
    // We're looking for methods with *receiver* type receiver_ty. These could
    // be found in any of the derefs of receiver_ty, so we have to go through
    // that.
    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
    while let Some((self_ty, _)) = autoderef.next() {
        iterate_inherent_methods(
            &self_ty,
            &mut autoderef.table,
            name,
            Some(&receiver_ty),
            Some(receiver_adjustments.clone()),
            visible_from_module,
            &mut callback,
        )?
    }

    table.rollback_to(snapshot);

    // Second pass: trait methods, again over the whole deref chain.
    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
    while let Some((self_ty, _)) = autoderef.next() {
        iterate_trait_method_candidates(
            &self_ty,
            &mut autoderef.table,
            traits_in_scope,
            name,
            Some(&receiver_ty),
            Some(receiver_adjustments.clone()),
            &mut callback,
        )?
    }

    ControlFlow::Continue(())
}
+
/// Path-mode lookup: tries candidates for exactly `self_ty` (no autoderef),
/// inherent items first, then trait items.
fn iterate_method_candidates_for_self_ty(
    self_ty: &Canonical<Ty>,
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    traits_in_scope: &FxHashSet<TraitId>,
    visible_from_module: VisibleFromModule,
    name: Option<&Name>,
    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let mut table = InferenceTable::new(db, env);
    let self_ty = table.instantiate_canonical(self_ty.clone());
    iterate_inherent_methods(
        &self_ty,
        &mut table,
        name,
        None,
        None,
        visible_from_module,
        &mut callback,
    )?;
    iterate_trait_method_candidates(
        &self_ty,
        &mut table,
        traits_in_scope,
        name,
        None,
        None,
        callback,
    )
}
+
/// Yields trait-item candidates for `self_ty`: items of the traits in scope,
/// plus — without requiring them in scope — the traits of a `dyn Trait` self
/// type and traits implied by `T: Trait` clauses for placeholder types.
fn iterate_trait_method_candidates(
    self_ty: &Ty,
    table: &mut InferenceTable<'_>,
    traits_in_scope: &FxHashSet<TraitId>,
    name: Option<&Name>,
    receiver_ty: Option<&Ty>,
    receiver_adjustments: Option<ReceiverAdjustments>,
    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let db = table.db;
    let env = table.trait_env.clone();
    let self_is_array = matches!(self_ty.kind(Interner), chalk_ir::TyKind::Array(..));
    // if ty is `dyn Trait`, the trait doesn't need to be in scope
    let inherent_trait =
        self_ty.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
    let env_traits = matches!(self_ty.kind(Interner), TyKind::Placeholder(_))
        // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
        .then(|| {
            env.traits_in_scope_from_clauses(self_ty.clone())
                .flat_map(|t| all_super_traits(db.upcast(), t))
        })
        .into_iter()
        .flatten();
    let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());

    let canonical_self_ty = table.canonicalize(self_ty.clone()).value;

    'traits: for t in traits {
        let data = db.trait_data(t);

        // Traits annotated with `#[rustc_skip_array_during_method_dispatch]` are skipped during
        // method resolution, if the receiver is an array, and we're compiling for editions before
        // 2021.
        // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for
        // arrays.
        if data.skip_array_during_method_dispatch && self_is_array {
            // FIXME: this should really be using the edition of the method name's span, in case it
            // comes from a macro
            if db.crate_graph()[env.krate].edition < Edition::Edition2021 {
                continue;
            }
        }

        // we'll be lazy about checking whether the type implements the
        // trait, but if we find out it doesn't, we'll skip the rest of the
        // iteration
        let mut known_implemented = false;
        for &(_, item) in data.items.iter() {
            // Don't pass a `visible_from_module` down to `is_valid_candidate`,
            // since only inherent methods should be included into visibility checking.
            if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) {
                continue;
            }
            if !known_implemented {
                let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
                if db.trait_solve(env.krate, goal.cast(Interner)).is_none() {
                    continue 'traits;
                }
            }
            known_implemented = true;
            callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
        }
    }
    ControlFlow::Continue(())
}
+
/// Yields inherent-impl candidates for `self_ty`: first from the enclosing
/// block's impls (if any), then from the crates returned by [`def_crates`].
/// Block impls coming first is intentional — they shadow crate-level impls.
fn iterate_inherent_methods(
    self_ty: &Ty,
    table: &mut InferenceTable<'_>,
    name: Option<&Name>,
    receiver_ty: Option<&Ty>,
    receiver_adjustments: Option<ReceiverAdjustments>,
    visible_from_module: VisibleFromModule,
    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
) -> ControlFlow<()> {
    let db = table.db;
    let env = table.trait_env.clone();
    // Types without inherent impls (e.g. references) contribute no candidates.
    let def_crates = match def_crates(db, self_ty, env.krate) {
        Some(k) => k,
        None => return ControlFlow::Continue(()),
    };

    let (module, block) = match visible_from_module {
        VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
        VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
        VisibleFromModule::None => (None, None),
    };

    if let Some(block_id) = block {
        if let Some(impls) = db.inherent_impls_in_block(block_id) {
            impls_for_self_ty(
                &impls,
                self_ty,
                table,
                name,
                receiver_ty,
                receiver_adjustments.clone(),
                module,
                callback,
            )?;
        }
    }

    for krate in def_crates {
        let impls = db.inherent_impls_in_crate(krate);
        impls_for_self_ty(
            &impls,
            self_ty,
            table,
            name,
            receiver_ty,
            receiver_adjustments.clone(),
            module,
            callback,
        )?;
    }
    return ControlFlow::Continue(());

    // Feeds every valid candidate from one `InherentImpls` index to `callback`.
    fn impls_for_self_ty(
        impls: &InherentImpls,
        self_ty: &Ty,
        table: &mut InferenceTable<'_>,
        name: Option<&Name>,
        receiver_ty: Option<&Ty>,
        receiver_adjustments: Option<ReceiverAdjustments>,
        visible_from_module: Option<ModuleId>,
        callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
    ) -> ControlFlow<()> {
        let db = table.db;
        let impls_for_self_ty = impls.for_self_ty(self_ty);
        for &impl_def in impls_for_self_ty {
            for &item in &db.impl_data(impl_def).items {
                if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module)
                {
                    continue;
                }
                callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
            }
        }
        ControlFlow::Continue(())
    }
}
+
+/// Returns the receiver type for the index trait call.
+pub fn resolve_indexing_op(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ty: Canonical<Ty>,
+ index_trait: TraitId,
+) -> Option<ReceiverAdjustments> {
+ let mut table = InferenceTable::new(db, env.clone());
+ let ty = table.instantiate_canonical(ty);
+ let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+ for (ty, adj) in deref_chain.into_iter().zip(adj) {
+ let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
+ if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
+ return Some(adj);
+ }
+ }
+ None
+}
+
/// Checks whether `item` is a usable candidate: matching name, visibility
/// (inherent items only — callers pass `None` for trait items), compatible
/// self type, and — for method calls — a `self` parameter unifying with the
/// receiver. All unification happens in snapshots, so no constraints leak.
fn is_valid_candidate(
    table: &mut InferenceTable<'_>,
    name: Option<&Name>,
    receiver_ty: Option<&Ty>,
    item: AssocItemId,
    self_ty: &Ty,
    visible_from_module: Option<ModuleId>,
) -> bool {
    // Early-returns `false` unless the condition holds.
    macro_rules! check_that {
        ($cond:expr) => {
            if !$cond {
                return false;
            }
        };
    }

    let db = table.db;
    match item {
        AssocItemId::FunctionId(m) => {
            let data = db.function_data(m);

            check_that!(name.map_or(true, |n| n == &data.name));
            check_that!(visible_from_module.map_or(true, |from_module| {
                let v = db.function_visibility(m).is_visible_from(db.upcast(), from_module);
                if !v {
                    cov_mark::hit!(autoderef_candidate_not_visible);
                }
                v
            }));

            table.run_in_snapshot(|table| {
                let subst = TyBuilder::subst_for_def(db, m).fill_with_inference_vars(table).build();
                // The "expected" self type: for trait items it's the `Self`
                // parameter, for impl items it's the impl's self type.
                let expect_self_ty = match m.lookup(db.upcast()).container {
                    ItemContainerId::TraitId(_) => {
                        subst.at(Interner, 0).assert_ty_ref(Interner).clone()
                    }
                    ItemContainerId::ImplId(impl_id) => {
                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
                    }
                    // We should only get called for associated items (impl/trait)
                    ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
                        unreachable!()
                    }
                };
                check_that!(table.unify(&expect_self_ty, self_ty));
                if let Some(receiver_ty) = receiver_ty {
                    // Method-call mode: the function must take `self`, and its
                    // first parameter must unify with the receiver.
                    check_that!(data.has_self_param());

                    let sig = db.callable_item_signature(m.into());
                    let expected_receiver =
                        sig.map(|s| s.params()[0].clone()).substitute(Interner, &subst);

                    check_that!(table.unify(&receiver_ty, &expected_receiver));
                }
                true
            })
        }
        AssocItemId::ConstId(c) => {
            let data = db.const_data(c);
            // Consts are never valid method-call receivers.
            check_that!(receiver_ty.is_none());

            check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n)));
            check_that!(visible_from_module.map_or(true, |from_module| {
                let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module);
                if !v {
                    cov_mark::hit!(const_candidate_not_visible);
                }
                v
            }));
            if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container {
                let self_ty_matches = table.run_in_snapshot(|table| {
                    let subst =
                        TyBuilder::subst_for_def(db, c).fill_with_inference_vars(table).build();
                    let expected_self_ty =
                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner);
                    table.unify(&expected_self_ty, &self_ty)
                });
                if !self_ty_matches {
                    cov_mark::hit!(const_candidate_self_type_mismatch);
                    return false;
                }
            }
            true
        }
        // Type aliases (and anything else) are not candidates.
        _ => false,
    }
}
+
+pub fn implements_trait(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ solution.is_some()
+}
+
+pub fn implements_trait_unique(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ matches!(solution, Some(crate::Solution::Unique(_)))
+}
+
/// This creates Substs for a trait with the given Self type and type variables
/// for all other parameters, to query Chalk with it.
fn generic_implements_goal(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    trait_: TraitId,
    self_ty: &Canonical<Ty>,
) -> Canonical<InEnvironment<super::DomainGoal>> {
    // Start from the binders of the canonicalized self type, then append one
    // bound variable per remaining trait parameter.
    let mut kinds = self_ty.binders.interned().to_vec();
    let trait_ref = TyBuilder::trait_ref(db, trait_)
        .push(self_ty.value.clone())
        .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
        .build();
    // `skip(1)` skips the Self parameter — its binders are already in `kinds`.
    kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
        let vk = match x.data(Interner) {
            chalk_ir::GenericArgData::Ty(_) => {
                chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
            }
            chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
            chalk_ir::GenericArgData::Const(c) => {
                chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
            }
        };
        chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
    }));
    let obligation = trait_ref.cast(Interner);
    Canonical {
        binders: CanonicalVarKinds::from_iter(Interner, kinds),
        value: InEnvironment::new(&env.env, obligation),
    }
}
+
/// Builds the full autoderef chain of `ty` (canonicalized at each step) along
/// with the receiver adjustments needed to reach each step.
fn autoderef_method_receiver(
    table: &mut InferenceTable<'_>,
    ty: Ty,
) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
    let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
    let mut autoderef = autoderef::Autoderef::new(table, ty);
    while let Some((ty, derefs)) = autoderef.next() {
        deref_chain.push(autoderef.table.canonicalize(ty).value);
        adjustments.push(ReceiverAdjustments {
            autoref: None,
            autoderefs: derefs,
            unsize_array: false,
        });
    }
    // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
    if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
        deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
        adjustments.last().cloned(),
    ) {
        let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
        deref_chain.push(Canonical { value: unsized_ty, binders });
        adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
    }
    (deref_chain, adjustments)
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs
new file mode 100644
index 000000000..d7f48c69a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/primitive.rs
@@ -0,0 +1,62 @@
+//! A few helper functions for dealing with primitives.
+
+pub use chalk_ir::{FloatTy, IntTy, UintTy};
+pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint};
+
/// Returns the surface-syntax name (`"i32"` etc.) of a signed integer type.
pub fn int_ty_to_string(ty: IntTy) -> &'static str {
    match ty {
        IntTy::Isize => "isize",
        IntTy::I8 => "i8",
        IntTy::I16 => "i16",
        IntTy::I32 => "i32",
        IntTy::I64 => "i64",
        IntTy::I128 => "i128",
    }
}
+
/// Returns the surface-syntax name (`"u32"` etc.) of an unsigned integer type.
pub fn uint_ty_to_string(ty: UintTy) -> &'static str {
    match ty {
        UintTy::Usize => "usize",
        UintTy::U8 => "u8",
        UintTy::U16 => "u16",
        UintTy::U32 => "u32",
        UintTy::U64 => "u64",
        UintTy::U128 => "u128",
    }
}
+
/// Returns the surface-syntax name (`"f32"`/`"f64"`) of a float type.
pub fn float_ty_to_string(ty: FloatTy) -> &'static str {
    match ty {
        FloatTy::F32 => "f32",
        FloatTy::F64 => "f64",
    }
}
+
/// Converts hir-def's `BuiltinInt` into chalk's `IntTy` (1:1 mapping).
pub(super) fn int_ty_from_builtin(t: BuiltinInt) -> IntTy {
    match t {
        BuiltinInt::Isize => IntTy::Isize,
        BuiltinInt::I8 => IntTy::I8,
        BuiltinInt::I16 => IntTy::I16,
        BuiltinInt::I32 => IntTy::I32,
        BuiltinInt::I64 => IntTy::I64,
        BuiltinInt::I128 => IntTy::I128,
    }
}
+
/// Converts hir-def's `BuiltinUint` into chalk's `UintTy` (1:1 mapping).
pub(super) fn uint_ty_from_builtin(t: BuiltinUint) -> UintTy {
    match t {
        BuiltinUint::Usize => UintTy::Usize,
        BuiltinUint::U8 => UintTy::U8,
        BuiltinUint::U16 => UintTy::U16,
        BuiltinUint::U32 => UintTy::U32,
        BuiltinUint::U64 => UintTy::U64,
        BuiltinUint::U128 => UintTy::U128,
    }
}
+
/// Converts hir-def's `BuiltinFloat` into chalk's `FloatTy` (1:1 mapping).
pub(super) fn float_ty_from_builtin(t: BuiltinFloat) -> FloatTy {
    match t {
        BuiltinFloat::F32 => FloatTy::F32,
        BuiltinFloat::F64 => FloatTy::F64,
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
new file mode 100644
index 000000000..dc7252f70
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -0,0 +1,150 @@
+//! Database used for testing `hir`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{
+ salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+};
+use hir_def::{db::DefDatabase, ModuleId};
+use hir_expand::db::AstDatabase;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::TextRange;
+use test_utils::extract_annotations;
+
/// In-memory salsa database for `hir-ty` tests, implementing the full query
/// stack from `SourceDatabase` up to `HirDatabase`.
#[salsa::database(
    base_db::SourceDatabaseExtStorage,
    base_db::SourceDatabaseStorage,
    hir_expand::db::AstDatabaseStorage,
    hir_def::db::InternDatabaseStorage,
    hir_def::db::DefDatabaseStorage,
    crate::db::HirDatabaseStorage
)]
pub(crate) struct TestDB {
    storage: salsa::Storage<TestDB>,
    // `Some(log)` while events are being recorded via `TestDB::log`.
    events: Mutex<Option<Vec<salsa::Event>>>,
}
+
+impl Default for TestDB {
+ fn default() -> Self {
+ let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.set_enable_proc_attr_macros(true);
+ this
+ }
+}
+
impl fmt::Debug for TestDB {
    // Deliberately terse: dumping the salsa storage would be unreadable.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("TestDB").finish()
    }
}
+
impl Upcast<dyn AstDatabase> for TestDB {
    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
        // `TestDB` implements `AstDatabase`, so this is a plain unsizing coercion.
        &*self
    }
}
+
impl Upcast<dyn DefDatabase> for TestDB {
    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
        // `TestDB` implements `DefDatabase`, so this is a plain unsizing coercion.
        &*self
    }
}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
impl salsa::ParallelDatabase for TestDB {
    fn snapshot(&self) -> salsa::Snapshot<TestDB> {
        salsa::Snapshot::new(TestDB {
            storage: self.storage.snapshot(),
            // Snapshots start with event logging disabled.
            events: Default::default(),
        })
    }
}
+
// NOTE(review): manual opt-in so tests can use `catch_unwind` with the DB;
// presumably the salsa storage doesn't derive `RefUnwindSafe` automatically —
// confirm against other rust-analyzer databases which do the same.
impl panic::RefUnwindSafe for TestDB {}
+
impl FileLoader for TestDB {
    // All three methods delegate to `FileLoaderDelegate`, the stock
    // implementation built on top of `SourceDatabase`.
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}
+
+impl TestDB {
+ pub(crate) fn module_for_file_opt(&self, file_id: FileId) -> Option<ModuleId> {
+ for &krate in self.relevant_crates(file_id).iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (local_id, data) in crate_def_map.modules() {
+ if data.origin.file_id() == Some(file_id) {
+ return Some(crate_def_map.module_id(local_id));
+ }
+ }
+ }
+ None
+ }
+
+ pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
+ self.module_for_file_opt(file_id).unwrap()
+ }
+
+ pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
+ let mut files = Vec::new();
+ let crate_graph = self.crate_graph();
+ for krate in crate_graph.iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (module_id, _) in crate_def_map.modules() {
+ let file_id = crate_def_map[module_id].origin.file_id();
+ files.extend(file_id)
+ }
+ }
+ files
+ .into_iter()
+ .filter_map(|file_id| {
+ let text = self.file_text(file_id);
+ let annotations = extract_annotations(&text);
+ if annotations.is_empty() {
+ return None;
+ }
+ Some((file_id, annotations))
+ })
+ .collect()
+ }
+}
+
impl TestDB {
    /// Runs `f` while recording salsa events, returning everything that was
    /// logged. Recording is enabled by setting `events` to `Some` (see
    /// `salsa_event`).
    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
        *self.events.lock().unwrap() = Some(Vec::new());
        f();
        self.events.lock().unwrap().take().unwrap()
    }

    /// Like [`Self::log`], but reduces the events to the debug names of the
    /// queries that were actually (re-)executed.
    pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
        let events = self.log(f);
        events
            .into_iter()
            .filter_map(|e| match e.kind {
                // This is pretty horrible, but `Debug` is the only way to inspect
                // QueryDescriptor at the moment.
                salsa::EventKind::WillExecute { database_key } => {
                    Some(format!("{:?}", database_key.debug(self)))
                }
                _ => None,
            })
            .collect()
    }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
new file mode 100644
index 000000000..d2f13e435
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -0,0 +1,578 @@
+mod never_type;
+mod coercion;
+mod regression;
+mod simple;
+mod patterns;
+mod traits;
+mod method_resolution;
+mod macros;
+mod display_source_code;
+mod incremental;
+mod diagnostics;
+
+use std::{collections::HashMap, env, sync::Arc};
+
+use base_db::{fixture::WithFixture, FileRange, SourceDatabaseExt};
+use expect_test::Expect;
+use hir_def::{
+ body::{Body, BodySourceMap, SyntheticSyntax},
+ db::DefDatabase,
+ expr::{ExprId, PatId},
+ item_scope::ItemScope,
+ nameres::DefMap,
+ src::HasSource,
+ AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
+};
+use hir_expand::{db::AstDatabase, InFile};
+use once_cell::race::OnceBool;
+use stdx::format_to;
+use syntax::{
+ ast::{self, AstNode, HasName},
+ SyntaxNode,
+};
+use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+use tracing_tree::HierarchicalLayer;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ infer::{Adjustment, TypeMismatch},
+ test_db::TestDB,
+ InferenceResult, Ty,
+};
+
+// These tests compare the inference results for all expressions in a file
+// against snapshots of the expected results using expect. Use
+// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
+
+/// Installs a hierarchical tracing subscriber when the `CHALK_DEBUG` env var
+/// is set, so chalk's trace output is visible while a test runs. Returns a
+/// guard that uninstalls the subscriber when dropped, or `None` if disabled.
+fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
+    // The env var is checked once per process; changing it later has no effect.
+    static ENABLE: OnceBool = OnceBool::new();
+    if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) {
+        return None;
+    }
+
+    let filter = EnvFilter::from_env("CHALK_DEBUG");
+    let layer = HierarchicalLayer::default()
+        .with_indent_lines(true)
+        .with_ansi(false)
+        .with_indent_amount(2)
+        .with_writer(std::io::stderr);
+    let subscriber = Registry::default().with(filter).with(layer);
+    Some(tracing::subscriber::set_default(subscriber))
+}
+
+/// Checks only type annotations; every annotation is taken verbatim as the
+/// expected type (no `type: ` prefix required).
+fn check_types(ra_fixture: &str) {
+    check_impl(ra_fixture, false, true, false)
+}
+
+/// Like `check_types`, but renders types with the source-code display
+/// (e.g. qualified paths) instead of the test-friendly display.
+fn check_types_source_code(ra_fixture: &str) {
+    check_impl(ra_fixture, false, true, true)
+}
+
+/// Asserts the fixture infers without any type mismatches; annotations are
+/// optional (`allow_none = true`).
+fn check_no_mismatches(ra_fixture: &str) {
+    check_impl(ra_fixture, true, false, false)
+}
+
+/// General entry point: handles `type:`, `expected ...`, and `adjustments:`
+/// annotations, and requires at least one annotation to be present.
+fn check(ra_fixture: &str) {
+    check_impl(ra_fixture, false, false, false)
+}
+
+/// Workhorse behind `check`/`check_types`/`check_no_mismatches`.
+///
+/// * `allow_none` - don't require any annotations in the fixture.
+/// * `only_types` - treat every annotation as a type annotation (no
+///   `type: ` prefix needed); mismatch/adjustment annotations are then
+///   unreachable since all annotations land in `types`.
+/// * `display_source` - render types with the source-code display instead
+///   of the test display.
+///
+/// Panics (via `assert!`/`assert_eq!`) when an annotation is wrong, when an
+/// annotation is never matched by inference output, or when inference
+/// produces a mismatch that was not annotated.
+fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
+    let _tracing = setup_tracing();
+    let (db, files) = TestDB::with_many_files(ra_fixture);
+
+    // Phase 1: bucket annotations by kind, keyed by the file range they
+    // point at.
+    let mut had_annotations = false;
+    let mut mismatches = HashMap::new();
+    let mut types = HashMap::new();
+    let mut adjustments = HashMap::<_, Vec<_>>::new();
+    for (file_id, annotations) in db.extract_annotations() {
+        for (range, expected) in annotations {
+            let file_range = FileRange { file_id, range };
+            if only_types {
+                types.insert(file_range, expected);
+            } else if expected.starts_with("type: ") {
+                types.insert(file_range, expected.trim_start_matches("type: ").to_string());
+            } else if expected.starts_with("expected") {
+                mismatches.insert(file_range, expected);
+            } else if expected.starts_with("adjustments: ") {
+                adjustments.insert(
+                    file_range,
+                    expected
+                        .trim_start_matches("adjustments: ")
+                        .split(',')
+                        .map(|it| it.trim().to_string())
+                        .filter(|it| !it.is_empty())
+                        .collect(),
+                );
+            } else {
+                panic!("unexpected annotation: {}", expected);
+            }
+            had_annotations = true;
+        }
+    }
+    assert!(had_annotations || allow_none, "no `//^` annotations found");
+
+    // Phase 2: collect every item with a body (fns, consts, statics) across
+    // all fixture files, then sort by source position so failures are
+    // reported deterministically.
+    let mut defs: Vec<DefWithBodyId> = Vec::new();
+    for file_id in files {
+        let module = db.module_for_file_opt(file_id);
+        let module = match module {
+            Some(m) => m,
+            None => continue,
+        };
+        let def_map = module.def_map(&db);
+        visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+    }
+    defs.sort_by_key(|def| match def {
+        DefWithBodyId::FunctionId(it) => {
+            let loc = it.lookup(&db);
+            loc.source(&db).value.syntax().text_range().start()
+        }
+        DefWithBodyId::ConstId(it) => {
+            let loc = it.lookup(&db);
+            loc.source(&db).value.syntax().text_range().start()
+        }
+        DefWithBodyId::StaticId(it) => {
+            let loc = it.lookup(&db);
+            loc.source(&db).value.syntax().text_range().start()
+        }
+    });
+    // Phase 3: run inference per body and tick off matching annotations.
+    // Annotations are `remove`d from their maps as they are satisfied, so
+    // whatever is left over can be reported at the end.
+    let mut unexpected_type_mismatches = String::new();
+    for def in defs {
+        let (_body, body_source_map) = db.body_with_source_map(def);
+        let inference_result = db.infer(def);
+
+        for (pat, ty) in inference_result.type_of_pat.iter() {
+            let node = match pat_node(&body_source_map, pat, &db) {
+                Some(value) => value,
+                None => continue,
+            };
+            let range = node.as_ref().original_file_range(&db);
+            if let Some(expected) = types.remove(&range) {
+                let actual = if display_source {
+                    ty.display_source_code(&db, def.module(&db)).unwrap()
+                } else {
+                    ty.display_test(&db).to_string()
+                };
+                assert_eq!(actual, expected);
+            }
+        }
+
+        for (expr, ty) in inference_result.type_of_expr.iter() {
+            let node = match expr_node(&body_source_map, expr, &db) {
+                Some(value) => value,
+                None => continue,
+            };
+            let range = node.as_ref().original_file_range(&db);
+            if let Some(expected) = types.remove(&range) {
+                let actual = if display_source {
+                    ty.display_source_code(&db, def.module(&db)).unwrap()
+                } else {
+                    ty.display_test(&db).to_string()
+                };
+                assert_eq!(actual, expected);
+            }
+            // An `adjustments:` annotation must match the debug rendering of
+            // the adjustment kinds recorded for this expression, in order.
+            if let Some(expected) = adjustments.remove(&range) {
+                if let Some(adjustments) = inference_result.expr_adjustments.get(&expr) {
+                    assert_eq!(
+                        expected,
+                        adjustments
+                            .iter()
+                            .map(|Adjustment { kind, .. }| format!("{:?}", kind))
+                            .collect::<Vec<_>>()
+                    );
+                } else {
+                    panic!("expected {:?} adjustments, found none", expected);
+                }
+            }
+        }
+
+        for (pat, mismatch) in inference_result.pat_type_mismatches() {
+            let node = match pat_node(&body_source_map, pat, &db) {
+                Some(value) => value,
+                None => continue,
+            };
+            let range = node.as_ref().original_file_range(&db);
+            let actual = format!(
+                "expected {}, got {}",
+                mismatch.expected.display_test(&db),
+                mismatch.actual.display_test(&db)
+            );
+            // Unannotated mismatches are accumulated and reported together.
+            match mismatches.remove(&range) {
+                Some(annotation) => assert_eq!(actual, annotation),
+                None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
+            }
+        }
+        for (expr, mismatch) in inference_result.expr_type_mismatches() {
+            let node = match body_source_map.expr_syntax(expr) {
+                Ok(sp) => {
+                    let root = db.parse_or_expand(sp.file_id).unwrap();
+                    sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+                }
+                Err(SyntheticSyntax) => continue,
+            };
+            let range = node.as_ref().original_file_range(&db);
+            let actual = format!(
+                "expected {}, got {}",
+                mismatch.expected.display_test(&db),
+                mismatch.actual.display_test(&db)
+            );
+            match mismatches.remove(&range) {
+                Some(annotation) => assert_eq!(actual, annotation),
+                None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
+            }
+        }
+    }
+
+    // Phase 4: anything still left in the annotation maps was never matched
+    // by inference output; report everything at once in a single panic.
+    let mut buf = String::new();
+    if !unexpected_type_mismatches.is_empty() {
+        format_to!(buf, "Unexpected type mismatches:\n{}", unexpected_type_mismatches);
+    }
+    if !mismatches.is_empty() {
+        format_to!(buf, "Unchecked mismatch annotations:\n");
+        for m in mismatches {
+            format_to!(buf, "{:?}: {}\n", m.0.range, m.1);
+        }
+    }
+    if !types.is_empty() {
+        format_to!(buf, "Unchecked type annotations:\n");
+        for t in types {
+            format_to!(buf, "{:?}: type {}\n", t.0.range, t.1);
+        }
+    }
+    if !adjustments.is_empty() {
+        format_to!(buf, "Unchecked adjustments annotations:\n");
+        for t in adjustments {
+            format_to!(buf, "{:?}: type {:?}\n", t.0.range, t.1);
+        }
+    }
+    assert!(buf.is_empty(), "{}", buf);
+}
+
+/// Maps an inferred expression id back to its syntax node, expanding macros
+/// when the expression originates inside one. Returns `None` for synthetic
+/// expressions that have no corresponding source.
+fn expr_node(
+    body_source_map: &BodySourceMap,
+    expr: ExprId,
+    db: &TestDB,
+) -> Option<InFile<SyntaxNode>> {
+    Some(match body_source_map.expr_syntax(expr) {
+        Ok(sp) => {
+            let root = db.parse_or_expand(sp.file_id).unwrap();
+            sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+        }
+        Err(SyntheticSyntax) => return None,
+    })
+}
+
+/// Maps an inferred pattern id back to its syntax node; the source pointer
+/// is an `Either` of two pattern ptr kinds, both resolved to their syntax.
+/// Returns `None` for synthetic patterns that have no corresponding source.
+fn pat_node(
+    body_source_map: &BodySourceMap,
+    pat: PatId,
+    db: &TestDB,
+) -> Option<InFile<SyntaxNode>> {
+    Some(match body_source_map.pat_syntax(pat) {
+        Ok(sp) => {
+            let root = db.parse_or_expand(sp.file_id).unwrap();
+            sp.map(|ptr| {
+                ptr.either(
+                    |it| it.to_node(&root).syntax().clone(),
+                    |it| it.to_node(&root).syntax().clone(),
+                )
+            })
+        }
+        Err(SyntheticSyntax) => return None,
+    })
+}
+
+/// Renders the inference dump for a fixture, without mismatch lines; see
+/// `infer_with_mismatches` for the output format.
+fn infer(ra_fixture: &str) -> String {
+    infer_with_mismatches(ra_fixture, false)
+}
+
+/// Builds the inference dump for a single-file fixture: one line per
+/// inferred pattern/expression of the form `{range} '{truncated text}': {ty}`,
+/// sorted by text range. A leading `!` marks nodes that originate from a
+/// macro expansion (their file id differs from the fixture file). When
+/// `include_mismatches` is set, mismatch lines follow the type lines.
+fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
+    let _tracing = setup_tracing();
+    let (db, file_id) = TestDB::with_single_file(content);
+
+    let mut buf = String::new();
+
+    // Renders one body's inference results into `buf`.
+    let mut infer_def = |inference_result: Arc<InferenceResult>,
+                         body_source_map: Arc<BodySourceMap>| {
+        let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
+        let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
+
+        for (pat, ty) in inference_result.type_of_pat.iter() {
+            let syntax_ptr = match body_source_map.pat_syntax(pat) {
+                Ok(sp) => {
+                    let root = db.parse_or_expand(sp.file_id).unwrap();
+                    sp.map(|ptr| {
+                        ptr.either(
+                            |it| it.to_node(&root).syntax().clone(),
+                            |it| it.to_node(&root).syntax().clone(),
+                        )
+                    })
+                }
+                // Synthetic patterns have no source; skip them.
+                Err(SyntheticSyntax) => continue,
+            };
+            types.push((syntax_ptr.clone(), ty));
+            if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat) {
+                mismatches.push((syntax_ptr, mismatch));
+            }
+        }
+
+        for (expr, ty) in inference_result.type_of_expr.iter() {
+            let node = match body_source_map.expr_syntax(expr) {
+                Ok(sp) => {
+                    let root = db.parse_or_expand(sp.file_id).unwrap();
+                    sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+                }
+                Err(SyntheticSyntax) => continue,
+            };
+            types.push((node.clone(), ty));
+            if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
+                mismatches.push((node, mismatch));
+            }
+        }
+
+        // sort ranges for consistency
+        types.sort_by_key(|(node, _)| {
+            let range = node.value.text_range();
+            (range.start(), range.end())
+        });
+        for (node, ty) in &types {
+            // `self` params get special-cased: print the name's range and the
+            // literal text "self" instead of the whole parameter syntax.
+            let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
+                (self_param.name().unwrap().syntax().text_range(), "self".to_string())
+            } else {
+                (node.value.text_range(), node.value.text().to_string().replace('\n', " "))
+            };
+            let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
+            format_to!(
+                buf,
+                "{}{:?} '{}': {}\n",
+                macro_prefix,
+                range,
+                ellipsize(text, 15),
+                ty.display_test(&db)
+            );
+        }
+        if include_mismatches {
+            mismatches.sort_by_key(|(node, _)| {
+                let range = node.value.text_range();
+                (range.start(), range.end())
+            });
+            for (src_ptr, mismatch) in &mismatches {
+                let range = src_ptr.value.text_range();
+                let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
+                format_to!(
+                    buf,
+                    "{}{:?}: expected {}, got {}\n",
+                    macro_prefix,
+                    range,
+                    mismatch.expected.display_test(&db),
+                    mismatch.actual.display_test(&db),
+                );
+            }
+        }
+    };
+
+    let module = db.module_for_file(file_id);
+    let def_map = module.def_map(&db);
+
+    // Collect all bodies in source order, then dump each one in turn.
+    let mut defs: Vec<DefWithBodyId> = Vec::new();
+    visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+    defs.sort_by_key(|def| match def {
+        DefWithBodyId::FunctionId(it) => {
+            let loc = it.lookup(&db);
+            loc.source(&db).value.syntax().text_range().start()
+        }
+        DefWithBodyId::ConstId(it) => {
+            let loc = it.lookup(&db);
+            loc.source(&db).value.syntax().text_range().start()
+        }
+        DefWithBodyId::StaticId(it) => {
+            let loc = it.lookup(&db);
+            loc.source(&db).value.syntax().text_range().start()
+        }
+    });
+    for def in defs {
+        let (_body, source_map) = db.body_with_source_map(def);
+        let infer = db.infer(def);
+        infer_def(infer, source_map);
+    }
+
+    // Drop trailing whitespace without reallocating.
+    buf.truncate(buf.trim_end().len());
+    buf
+}
+
+/// Invokes `cb` for every item with a body (function, const, static)
+/// reachable from `module_id`: plain module declarations, impl associated
+/// items, trait associated items, nested modules, and items defined inside
+/// block expressions.
+fn visit_module(
+    db: &TestDB,
+    crate_def_map: &DefMap,
+    module_id: LocalModuleId,
+    cb: &mut dyn FnMut(DefWithBodyId),
+) {
+    visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
+    // Associated items in impls are not part of the module scope's
+    // declarations, so walk the impls explicitly.
+    for impl_id in crate_def_map[module_id].scope.impls() {
+        let impl_data = db.impl_data(impl_id);
+        for &item in impl_data.items.iter() {
+            match item {
+                AssocItemId::FunctionId(it) => {
+                    let def = it.into();
+                    cb(def);
+                    let body = db.body(def);
+                    visit_body(db, &body, cb);
+                }
+                AssocItemId::ConstId(it) => {
+                    let def = it.into();
+                    cb(def);
+                    let body = db.body(def);
+                    visit_body(db, &body, cb);
+                }
+                AssocItemId::TypeAliasId(_) => (),
+            }
+        }
+    }
+
+    // Visits the bodies declared directly in `scope`, recursing into each
+    // body for block-local items and into child modules.
+    fn visit_scope(
+        db: &TestDB,
+        crate_def_map: &DefMap,
+        scope: &ItemScope,
+        cb: &mut dyn FnMut(DefWithBodyId),
+    ) {
+        for decl in scope.declarations() {
+            match decl {
+                ModuleDefId::FunctionId(it) => {
+                    let def = it.into();
+                    cb(def);
+                    let body = db.body(def);
+                    visit_body(db, &body, cb);
+                }
+                ModuleDefId::ConstId(it) => {
+                    let def = it.into();
+                    cb(def);
+                    let body = db.body(def);
+                    visit_body(db, &body, cb);
+                }
+                ModuleDefId::StaticId(it) => {
+                    let def = it.into();
+                    cb(def);
+                    let body = db.body(def);
+                    visit_body(db, &body, cb);
+                }
+                ModuleDefId::TraitId(it) => {
+                    // Trait items are reported but their (default) bodies are
+                    // not recursed into here.
+                    let trait_data = db.trait_data(it);
+                    for &(_, item) in trait_data.items.iter() {
+                        match item {
+                            AssocItemId::FunctionId(it) => cb(it.into()),
+                            AssocItemId::ConstId(it) => cb(it.into()),
+                            AssocItemId::TypeAliasId(_) => (),
+                        }
+                    }
+                }
+                ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
+                _ => (),
+            }
+        }
+    }
+
+    // Items can also be defined inside bodies (block expressions); each such
+    // block has its own `DefMap`, so recurse through those as well.
+    fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(DefWithBodyId)) {
+        for (_, def_map) in body.blocks(db) {
+            for (mod_id, _) in def_map.modules() {
+                visit_module(db, &def_map, mod_id, cb);
+            }
+        }
+    }
+}
+
+/// Truncates `text` to at most `max_len` bytes by replacing the middle with
+/// `...`, keeping a prefix and suffix of roughly equal length.
+///
+/// NOTE(review): assumes `max_len >= 3` — smaller values would underflow
+/// `max_len - e_len`. The only caller in this file passes 15; confirm before
+/// reusing with smaller limits.
+fn ellipsize(mut text: String, max_len: usize) -> String {
+    if text.len() <= max_len {
+        return text;
+    }
+    let ellipsis = "...";
+    let e_len = ellipsis.len();
+    // Widen the prefix/suffix until the cut points land on char boundaries,
+    // so a multi-byte character is never split.
+    let mut prefix_len = (max_len - e_len) / 2;
+    while !text.is_char_boundary(prefix_len) {
+        prefix_len += 1;
+    }
+    let mut suffix_len = max_len - e_len - prefix_len;
+    while !text.is_char_boundary(text.len() - suffix_len) {
+        suffix_len += 1;
+    }
+    text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
+    text
+}
+
+/// Compares the full inference dump of the fixture against an expect-test
+/// snapshot; update snapshots with `env UPDATE_EXPECT=1 cargo test -p hir_ty`.
+fn check_infer(ra_fixture: &str, expect: Expect) {
+    let mut actual = infer(ra_fixture);
+    actual.push('\n');
+    expect.assert_eq(&actual);
+}
+
+/// Same as `check_infer`, but the dump also lists type mismatch lines.
+fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
+    let mut actual = infer_with_mismatches(ra_fixture, true);
+    actual.push('\n');
+    expect.assert_eq(&actual);
+}
+
+#[test]
+fn salsa_bug() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+ //- /lib.rs
+ trait Index {
+ type Output;
+ }
+
+ type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+ pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+ }
+
+ pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+ }
+
+ fn main() {
+ let x = 1;
+ x.push(1);$0
+ }
+ ",
+ );
+
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+
+ let new_text = "
+ //- /lib.rs
+ trait Index {
+ type Output;
+ }
+
+ type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+ pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+ }
+
+ pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+ }
+
+ fn main() {
+
+ let x = 1;
+ x.push(1);
+ }
+ "
+ .to_string();
+
+ db.set_file_text(pos.file_id, Arc::new(new_text));
+
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
new file mode 100644
index 000000000..bf59fadc2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
@@ -0,0 +1,755 @@
+use super::{check, check_no_mismatches, check_types};
+
+#[test]
+fn block_expr_type_mismatch() {
+ check(
+ r"
+fn test() {
+ let a: i32 = { 1i64 };
+ // ^^^^ expected i32, got i64
+}
+ ",
+ );
+}
+
+#[test]
+fn coerce_places() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct S<T> { a: T }
+
+fn f<T>(_: &[T]) -> T { loop {} }
+fn g<T>(_: S<&[T]>) -> T { loop {} }
+
+fn gen<T>() -> *mut [T; 2] { loop {} }
+fn test1<U>() -> *mut [U] {
+ gen()
+}
+
+fn test2() {
+ let arr: &[u8; 1] = &[1];
+
+ let a: &[_] = arr;
+ let b = f(arr);
+ let c: &[_] = { arr };
+ let d = g(S { a: arr });
+ let e: [&[_]; 1] = [arr];
+ let f: [&[_]; 2] = [arr; 2];
+ let g: (&[_], &[_]) = (arr, arr);
+}
+"#,
+ );
+}
+
+#[test]
+fn let_stmt_coerce() {
+ check(
+ r"
+//- minicore: coerce_unsized
+fn test() {
+ let x: &[isize] = &[1];
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ let x: *const [isize] = &[1];
+ // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
+}
+",
+ );
+}
+
+#[test]
+fn custom_coerce_unsized() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct A<T: ?Sized>(*const T);
+struct B<T: ?Sized>(*const T);
+struct C<T: ?Sized> { inner: *const T }
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<C<U>> for C<T> {}
+
+fn foo1<T>(x: A<[T]>) -> A<[T]> { x }
+fn foo2<T>(x: B<[T]>) -> B<[T]> { x }
+fn foo3<T>(x: C<[T]>) -> C<[T]> { x }
+
+fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
+ let d = foo1(a);
+ // ^ expected A<[{unknown}]>, got A<[u8; 2]>
+ let e = foo2(b);
+ // ^ type: B<[u8]>
+ let f = foo3(c);
+ // ^ type: C<[u8]>
+}
+"#,
+ );
+}
+
+#[test]
+fn if_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test() {
+ let x = if true {
+ foo(&[1])
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ } else {
+ &[1]
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn if_else_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test() {
+ let x = if true {
+ &[1]
+ } else {
+ foo(&[1])
+ };
+}
+"#,
+ )
+}
+
+#[test]
+fn match_first_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { x }
+fn test(i: i32) {
+ let x = match i {
+ 2 => foo(&[2]),
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
+ 1 => &[1],
+ _ => &[3],
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_second_coerce() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ // ^^^^^^^ adjustments: NeverToAny
+fn test(i: i32) {
+ let x = match i {
+ 1 => &[1],
+ 2 => foo(&[2]),
+ _ => &[3],
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_merge_one_by_one1() {
+ cov_mark::check!(coerce_merge_fail_fallback);
+
+ check(
+ r"
+fn test() {
+ let t = &mut 1;
+ let x = match 1 {
+ 1 => t as *mut i32,
+ 2 => t as &i32,
+ //^^^^^^^^^ expected *mut i32, got &i32
+ _ => t as *const i32,
+ // ^^^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
+
+ };
+ x;
+ //^ type: *const i32
+
+}
+ ",
+ );
+}
+
+#[test]
+fn return_coerce_unknown() {
+ check_types(
+ r"
+fn foo() -> u32 {
+ return unknown;
+ //^^^^^^^ u32
+}
+ ",
+ );
+}
+
+#[test]
+fn coerce_autoderef() {
+ check_no_mismatches(
+ r"
+struct Foo;
+fn takes_ref_foo(x: &Foo) {}
+fn test() {
+ takes_ref_foo(&Foo);
+ takes_ref_foo(&&Foo);
+ takes_ref_foo(&&&Foo);
+}",
+ );
+}
+
+#[test]
+fn coerce_autoderef_generic() {
+ check_no_mismatches(
+ r#"
+struct Foo;
+fn takes_ref<T>(x: &T) -> T { *x }
+fn test() {
+ takes_ref(&Foo);
+ takes_ref(&&Foo);
+ takes_ref(&&&Foo);
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_autoderef_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+struct String {}
+impl core::ops::Deref for String { type Target = str; }
+fn takes_ref_str(x: &str) {}
+fn returns_string() -> String { loop {} }
+fn test() {
+ takes_ref_str(&{ returns_string() });
+ // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_autoderef_implication_1() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct Foo<T>;
+impl core::ops::Deref for Foo<u32> { type Target = (); }
+
+fn takes_ref_foo<T>(x: &Foo<T>) {}
+fn test() {
+ let foo = Foo;
+ //^^^ type: Foo<{unknown}>
+ takes_ref_foo(&foo);
+
+ let foo = Foo;
+ //^^^ type: Foo<u32>
+ let _: &() = &foo;
+}",
+ );
+}
+
+#[test]
+fn coerce_autoderef_implication_2() {
+ check(
+ r"
+//- minicore: deref
+struct Foo<T>;
+impl core::ops::Deref for Foo<u32> { type Target = (); }
+
+fn takes_ref_foo<T>(x: &Foo<T>) {}
+fn test() {
+ let foo = Foo;
+ //^^^ type: Foo<{unknown}>
+ let _: &u32 = &Foo;
+ //^^^^ expected &u32, got &Foo<{unknown}>
+}",
+ );
+}
+
+#[test]
+fn closure_return_coerce() {
+ check_no_mismatches(
+ r"
+fn foo() {
+ let x = || {
+ if true {
+ return &1u32;
+ }
+ &&1u32
+ };
+}",
+ );
+}
+
+#[test]
+fn assign_coerce() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct String;
+impl core::ops::Deref for String { type Target = str; }
+fn g(_text: &str) {}
+fn f(text: &str) {
+ let mut text = text;
+ let tmp = String;
+ text = &tmp;
+ g(text);
+}
+",
+ );
+}
+
+#[test]
+fn destructuring_assign_coerce() {
+ check_no_mismatches(
+ r"
+//- minicore: deref
+struct String;
+impl core::ops::Deref for String { type Target = str; }
+fn g(_text: &str) {}
+fn f(text: &str) {
+ let mut text = text;
+ let tmp = String;
+ [text, _] = [&tmp, &tmp];
+ g(text);
+}
+",
+ );
+}
+
+#[test]
+fn coerce_fn_item_to_fn_ptr() {
+ check_no_mismatches(
+ r"
+fn foo(x: u32) -> isize { 1 }
+fn test() {
+ let f: fn(u32) -> isize = foo;
+ // ^^^ adjustments: Pointer(ReifyFnPointer)
+ let f: unsafe fn(u32) -> isize = foo;
+ // ^^^ adjustments: Pointer(ReifyFnPointer)
+}",
+ );
+}
+
+#[test]
+fn coerce_fn_items_in_match_arms() {
+ cov_mark::check!(coerce_fn_reification);
+
+ check_types(
+ r"
+fn foo1(x: u32) -> isize { 1 }
+fn foo2(x: u32) -> isize { 2 }
+fn foo3(x: u32) -> isize { 3 }
+fn test() {
+ let x = match 1 {
+ 1 => foo1,
+ 2 => foo2,
+ _ => foo3,
+ };
+ x;
+ //^ fn(u32) -> isize
+}",
+ );
+}
+
+#[test]
+fn coerce_closure_to_fn_ptr() {
+ check_no_mismatches(
+ r"
+fn test() {
+ let f: fn(u32) -> isize = |x| { 1 };
+}",
+ );
+}
+
+#[test]
+fn coerce_placeholder_ref() {
+ // placeholders should unify, even behind references
+ check_no_mismatches(
+ r"
+struct S<T> { t: T }
+impl<TT> S<TT> {
+ fn get(&self) -> &TT {
+ &self.t
+ }
+}",
+ );
+}
+
+#[test]
+fn coerce_unsize_array() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized
+fn test() {
+ let f: &[usize] = &[1, 2, 3];
+ //^ usize
+}"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_trait_object_simple() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized
+trait Foo<T, U> {}
+trait Bar<U, T, X>: Foo<T, U> {}
+trait Baz<T, X>: Bar<usize, T, X> {}
+
+struct S<T, X>;
+impl<T, X> Foo<T, usize> for S<T, X> {}
+impl<T, X> Bar<usize, T, X> for S<T, X> {}
+impl<T, X> Baz<T, X> for S<T, X> {}
+
+fn test() {
+ let obj: &dyn Baz<i8, i16> = &S;
+ //^ S<i8, i16>
+ let obj: &dyn Bar<_, i8, i16> = &S;
+ //^ S<i8, i16>
+ let obj: &dyn Foo<i8, _> = &S;
+ //^ S<i8, {unknown}>
+}"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_super_trait_cycle() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+trait A {}
+trait B: C + A {}
+trait C: B {}
+trait D: C
+
+struct S;
+impl A for S {}
+impl B for S {}
+impl C for S {}
+impl D for S {}
+
+fn test() {
+ let obj: &dyn D = &S;
+ let obj: &dyn A = &S;
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_generic() {
+ // FIXME: fix the type mismatches here
+ check(
+ r#"
+//- minicore: coerce_unsized
+struct Foo<T> { t: T };
+struct Bar<T>(Foo<T>);
+
+fn test() {
+ let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
+ //^^^^^^^^^ expected [usize], got [usize; 3]
+ let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
+ //^^^^^^^^^ expected [usize], got [usize; 3]
+}
+"#,
+ );
+}
+
+#[test]
+fn coerce_unsize_apit() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+trait Foo {}
+
+fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
+ let _: &dyn Foo = &f;
+ let _: &dyn Foo = g;
+ //^ expected &dyn Foo, got &impl Foo + ?Sized
+}
+ "#,
+ );
+}
+
+#[test]
+fn two_closures_lub() {
+ check_types(
+ r#"
+fn foo(c: i32) {
+ let add = |a: i32, b: i32| a + b;
+ let sub = |a, b| a - b;
+ //^^^^^^^^^^^^ |i32, i32| -> i32
+ if c > 42 { add } else { sub };
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ fn(i32, i32) -> i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn match_diverging_branch_1() {
+ check_types(
+ r#"
+enum Result<T> { Ok(T), Err }
+fn parse<T>() -> T { loop {} }
+
+fn test() -> i32 {
+ let a = match parse() {
+ Ok(val) => val,
+ Err => return 0,
+ };
+ a
+ //^ i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn match_diverging_branch_2() {
+ // same as 1 except for order of branches
+ check_types(
+ r#"
+enum Result<T> { Ok(T), Err }
+fn parse<T>() -> T { loop {} }
+
+fn test() -> i32 {
+ let a = match parse() {
+ Err => return 0,
+ Ok(val) => val,
+ };
+ a
+ //^ i32
+}
+ "#,
+ )
+}
+
+#[test]
+fn panic_macro() {
+ check_no_mismatches(
+ r#"
+mod panic {
+ #[macro_export]
+ pub macro panic_2015 {
+ () => (
+ $crate::panicking::panic()
+ ),
+ }
+}
+
+mod panicking {
+ pub fn panic() -> ! { loop {} }
+}
+
+#[rustc_builtin_macro = "core_panic"]
+macro_rules! panic {
+ // Expands to either `$crate::panic::panic_2015` or `$crate::panic::panic_2021`
+ // depending on the edition of the caller.
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+}
+
+fn main() {
+ panic!()
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+fn main() {
+ let foo: &[u32] = &[1, 2];
+ let foo: &[u32] = match true {
+ true => &[1, 2],
+ false => &[1, 2, 3],
+ };
+ let foo: &[u32] = if true {
+ &[1, 2]
+ } else {
+ &[1, 2, 3]
+ };
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct InFile<T>;
+impl<T> InFile<T> {
+ fn with_value<U>(self, value: U) -> InFile<U> { InFile }
+}
+struct RecordField;
+trait AstNode {}
+impl AstNode for RecordField {}
+
+fn takes_dyn(it: InFile<&dyn AstNode>) {}
+
+fn test() {
+ let x: InFile<()> = InFile;
+ let n = &RecordField;
+ takes_dyn(x.with_value(n));
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_3() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+enum Option<T> { Some(T), None }
+struct RecordField;
+trait AstNode {}
+impl AstNode for RecordField {}
+
+fn takes_dyn(it: Option<&dyn AstNode>) {}
+
+fn test() {
+ let x: InFile<()> = InFile;
+ let n = &RecordField;
+ takes_dyn(Option::Some(n));
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_unsize_expected_type_4() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct B<T: ?Sized>(*const T);
+impl<T: ?Sized> B<T> {
+ fn new(t: T) -> Self { B(&t) }
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+
+fn test() {
+ let _: B<[isize]> = B::new({ [1, 2, 3] });
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_array_elems_lub() {
+ check_no_mismatches(
+ r#"
+fn f() {}
+fn g() {}
+
+fn test() {
+ [f, g];
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_type_var() {
+ check_types(
+ r#"
+//- minicore: from, coerce_unsized
+fn test() {
+ let x = ();
+ let _: &() = &x.into();
+} //^^^^^^^^ ()
+"#,
+ )
+}
+
+#[test]
+fn coerce_overloaded_binary_op_rhs() {
+ check_types(
+ r#"
+//- minicore: deref, add
+
+struct String {}
+impl core::ops::Deref for String { type Target = str; }
+
+impl core::ops::Add<&str> for String {
+ type Output = String;
+}
+
+fn test() {
+ let s1 = String {};
+ let s2 = String {};
+ s1 + &s2;
+ //^^^^^^^^ String
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn assign_coerce_struct_fields() {
+ check_no_mismatches(
+ r#"
+//- minicore: coerce_unsized
+struct S;
+trait Tr {}
+impl Tr for S {}
+struct V<T> { t: T }
+
+fn main() {
+ let a: V<&dyn Tr>;
+ a = V { t: &S };
+
+ let mut a: V<&dyn Tr> = V { t: &S };
+ a = V { t: &S };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assign_coerce_struct_fields() {
+ check(
+ r#"
+//- minicore: coerce_unsized
+struct S;
+trait Tr {}
+impl Tr for S {}
+struct V<T> { t: T }
+
+fn main() {
+ let a: V<&dyn Tr>;
+ (a,) = V { t: &S };
+ //^^^^expected V<&S>, got (V<&dyn Tr>,)
+
+ let mut a: V<&dyn Tr> = V { t: &S };
+ (a,) = V { t: &S };
+ //^^^^expected V<&S>, got (V<&dyn Tr>,)
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
new file mode 100644
index 000000000..f00fa9729
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
@@ -0,0 +1,75 @@
+use super::check;
+
+#[test]
+fn function_return_type_mismatch_1() {
+ check(
+ r#"
+fn test() -> &'static str {
+ 5
+ //^ expected &str, got i32
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_2() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ return 1;
+ //^ expected &str, got i32
+ }
+ "ok"
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_3() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ return "ok";
+ }
+ 1
+ //^ expected &str, got i32
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_4() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ "ok"
+ } else {
+ 1
+ //^ expected &str, got i32
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn function_return_type_mismatch_5() {
+ check(
+ r#"
+fn test(x: bool) -> &'static str {
+ if x {
+ 1
+ //^ expected &str, got i32
+ } else {
+ "ok"
+ }
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
new file mode 100644
index 000000000..240942e48
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
@@ -0,0 +1,176 @@
+use super::check_types_source_code;
+
+#[test]
+fn qualify_path_to_submodule() {
+ check_types_source_code(
+ r#"
+mod foo {
+ pub struct Foo;
+}
+
+fn bar() {
+ let foo: foo::Foo = foo::Foo;
+ foo;
+} //^^^ foo::Foo
+
+"#,
+ );
+}
+
+#[test]
+fn omit_default_type_parameters() {
+ check_types_source_code(
+ r#"
+struct Foo<T = u8> { t: T }
+fn main() {
+ let foo = Foo { t: 5u8 };
+ foo;
+} //^^^ Foo
+"#,
+ );
+
+ check_types_source_code(
+ r#"
+struct Foo<K, T = u8> { k: K, t: T }
+fn main() {
+ let foo = Foo { k: 400, t: 5u8 };
+ foo;
+} //^^^ Foo<i32>
+"#,
+ );
+}
+
+#[test]
+fn render_raw_ptr_impl_ty() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Unpin {}
+fn foo() -> *const (impl Unpin + Sized) { loop {} }
+fn main() {
+ let foo = foo();
+ foo;
+} //^^^ *const impl Unpin
+"#,
+ );
+}
+
+#[test]
+fn render_dyn_for_ty() {
+ // FIXME
+ check_types_source_code(
+ r#"
+trait Foo<'a> {}
+
+fn foo(foo: &dyn for<'a> Foo<'a>) {}
+ // ^^^ &dyn Foo
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_apit() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+trait Bar<T> {}
+struct S<T>;
+fn test(
+ a: impl Foo,
+ b: impl Foo + Sized,
+ c: &(impl Foo + ?Sized),
+ d: S<impl Foo>,
+ ref_any: &impl ?Sized,
+ empty: impl,
+) {
+ a;
+ //^ impl Foo
+ b;
+ //^ impl Foo
+ c;
+ //^ &impl Foo + ?Sized
+ d;
+ //^ S<impl Foo>
+ ref_any;
+ //^^^^^^^ &impl ?Sized
+ empty;
+} //^^^^^ impl Sized
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_rpit() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo1() -> impl Foo { loop {} }
+fn foo2() -> impl Foo + Sized { loop {} }
+fn foo3() -> impl Foo + ?Sized { loop {} }
+fn test() {
+ let foo = foo1();
+ foo;
+ //^^^ impl Foo
+ let foo = foo2();
+ foo;
+ //^^^ impl Foo
+ let foo = foo3();
+ foo;
+} //^^^ impl Foo + ?Sized
+"#,
+ );
+}
+
+#[test]
+fn parenthesize_ptr_rpit_sized_bounds() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo1() -> *const impl Foo { loop {} }
+fn foo2() -> *const (impl Foo + Sized) { loop {} }
+fn foo3() -> *const (impl Sized + Foo) { loop {} }
+fn foo4() -> *const (impl Foo + ?Sized) { loop {} }
+fn foo5() -> *const (impl ?Sized + Foo) { loop {} }
+fn test() {
+ let foo = foo1();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo2();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo3();
+ foo;
+ //^^^ *const impl Foo
+ let foo = foo4();
+ foo;
+ //^^^ *const (impl Foo + ?Sized)
+ let foo = foo5();
+ foo;
+} //^^^ *const (impl Foo + ?Sized)
+"#,
+ );
+}
+
+#[test]
+fn sized_bounds_impl_traits_in_fn_signature() {
+ check_types_source_code(
+ r#"
+//- minicore: sized
+trait Foo {}
+fn test(
+ a: fn(impl Foo) -> impl Foo,
+ b: fn(impl Foo + Sized) -> impl Foo + Sized,
+ c: fn(&(impl Foo + ?Sized)) -> &(impl Foo + ?Sized),
+) {
+ a;
+ //^ fn(impl Foo) -> impl Foo
+ b;
+ //^ fn(impl Foo) -> impl Foo
+ c;
+} //^ fn(&impl Foo + ?Sized) -> &impl Foo + ?Sized
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
new file mode 100644
index 000000000..3e08e83e8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -0,0 +1,51 @@
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, SourceDatabaseExt};
+
+use crate::{db::HirDatabase, test_db::TestDB};
+
+use super::visit_module;
+
+#[test]
+fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+ //- /lib.rs
+ fn foo() -> i32 {
+ $01 + 1
+ }
+ ",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{:?}", events).contains("infer"))
+ }
+
+ let new_text = "
+ fn foo() -> i32 {
+ 1
+ +
+ 1
+ }
+ "
+ .to_string();
+
+ db.set_file_text(pos.file_id, Arc::new(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
new file mode 100644
index 000000000..a1ab6060e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -0,0 +1,1338 @@
+use expect_test::expect;
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::tests::check_infer_with_mismatches;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn cfg_impl_def() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo cfg:test
+use foo::S as T;
+struct S;
+
+#[cfg(test)]
+impl S {
+ fn foo1(&self) -> i32 { 0 }
+}
+
+#[cfg(not(test))]
+impl S {
+ fn foo2(&self) -> i32 { 0 }
+}
+
+fn test() {
+ let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
+ t;
+} //^ (i32, {unknown}, i32, {unknown})
+
+//- /foo.rs crate:foo
+pub struct S;
+
+#[cfg(not(test))]
+impl S {
+ pub fn foo3(&self) -> i32 { 0 }
+}
+
+#[cfg(test)]
+impl S {
+ pub fn foo4(&self) -> i32 { 0 }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 155..181 '{ ...,2); }': ()
+ 165..166 'x': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn infer_legacy_textual_scoped_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ #[macro_use]
+ mod m {
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ let y = crate::foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 194..250 '{ ...,2); }': ()
+ 204..205 'x': Foo
+ 227..228 'y': {unknown}
+ 231..247 'crate:...!(1,2)': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_qualified_macros_expanded() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => { 42i32 }
+ }
+
+ mod m {
+ pub use super::foo as bar;
+ }
+
+ fn main() {
+ let x = crate::foo!();
+ let y = m::bar!();
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ !0..5 '42i32': i32
+ 110..163 '{ ...!(); }': ()
+ 120..121 'x': i32
+ 147..148 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_def_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro spam() {
+ 1isize
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 39..442 '{ ...!(); }': ()
+ 73..94 'spam!(...am!())': {unknown}
+ 100..119 'for _ ...!() {}': ()
+ 104..105 '_': {unknown}
+ 117..119 '{}': ()
+ 124..134 '|| spam!()': || -> isize
+ 140..156 'while ...!() {}': ()
+ 154..156 '{}': ()
+ 161..174 'break spam!()': !
+ 180..194 'return spam!()': !
+ 200..254 'match ... }': isize
+ 224..225 '_': isize
+ 259..275 'spam!(...am!())': {unknown}
+ 281..303 'Spam {...m!() }': {unknown}
+ 309..325 'spam!(...am!()]': {unknown}
+ 350..366 'spam!(... usize': usize
+ 372..380 '&spam!()': &isize
+ 386..394 '-spam!()': isize
+ 400..416 'spam!(...pam!()': {unknown}
+ 422..439 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_rules_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro_rules! spam {
+ () => (1isize);
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 53..456 '{ ...!(); }': ()
+ 87..108 'spam!(...am!())': {unknown}
+ 114..133 'for _ ...!() {}': ()
+ 118..119 '_': {unknown}
+ 131..133 '{}': ()
+ 138..148 '|| spam!()': || -> isize
+ 154..170 'while ...!() {}': ()
+ 168..170 '{}': ()
+ 175..188 'break spam!()': !
+ 194..208 'return spam!()': !
+ 214..268 'match ... }': isize
+ 238..239 '_': isize
+ 273..289 'spam!(...am!())': {unknown}
+ 295..317 'Spam {...m!() }': {unknown}
+ 323..339 'spam!(...am!()]': {unknown}
+ 364..380 'spam!(... usize': usize
+ 386..394 '&spam!()': &isize
+ 400..408 '-spam!()': isize
+ 414..430 'spam!(...pam!()': {unknown}
+ 436..453 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! id { ($($es:tt)*) => { $($es)* } }
+ fn foo() {
+ id! { let a = (); }
+ }
+ "#,
+ expect![[r#"
+ !0..8 'leta=();': ()
+ !3..4 'a': ()
+ !5..7 '()': ()
+ 57..84 '{ ...); } }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn recurisve_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! ng {
+ ([$($tts:tt)*]) => {
+ $($tts)*;
+ };
+ ([$($tts:tt)*] $head:tt $($rest:tt)*) => {
+ ng! {
+ [$($tts)* $head] $($rest)*
+ }
+ };
+ }
+ fn foo() {
+ ng!([] let a = 3);
+ let b = a;
+ }
+ "#,
+ expect![[r#"
+ !0..7 'leta=3;': ()
+ !0..13 'ng!{[leta=3]}': ()
+ !0..13 'ng!{[leta=]3}': ()
+ !0..13 'ng!{[leta]=3}': ()
+ !0..13 'ng!{[let]a=3}': ()
+ !3..4 'a': i32
+ !5..6 '3': i32
+ 196..237 '{ ...= a; }': ()
+ 229..230 'b': i32
+ 233..234 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_inner_item_macro_rules() {
+ check_infer(
+ r#"
+ macro_rules! mac {
+ () => { mac!($)};
+ ($x:tt) => { macro_rules! blub { () => { 1 }; } };
+ }
+ fn foo() {
+ mac!();
+ let a = blub!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '1': i32
+ !0..7 'mac!($)': ()
+ !0..26 'macro_...>{1};}': ()
+ 107..143 '{ ...!(); }': ()
+ 129..130 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_macro_defining_block_with_items() {
+ check_infer(
+ r#"
+ macro_rules! foo {
+ () => {{
+ fn bar() -> usize { 0 }
+ bar()
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !15..18 '{0}': usize
+ !16..17 '0': usize
+ !0..24 '{fnbar...bar()}': usize
+ !18..21 'bar': fn bar() -> usize
+ !18..23 'bar()': usize
+ 98..122 '{ ...!(); }': ()
+ 108..110 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_value_macro_having_same_name() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => {
+ mod foo {
+ pub use super::foo;
+ }
+ };
+ ($x:tt) => {
+ $x
+ };
+ }
+
+ foo!();
+
+ fn foo() {
+ let foo = foo::foo!(42i32);
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ 170..205 '{ ...32); }': ()
+ 180..183 'foo': i32
+ "#]],
+ );
+}
+
+#[test]
+fn processes_impls_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ ($ident:ident) => (impl Trait for $ident {})
+}
+trait Trait { fn foo(self) -> u128 { 0 } }
+struct S;
+m!(S);
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ () => (fn foo(&self) -> u128 {0})
+}
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros_chain() {
+ check_types(
+ r#"
+macro_rules! m_inner {
+ () => {fn foo(&self) -> u128 {0}}
+}
+macro_rules! m {
+ () => {m_inner!();}
+}
+
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_expr() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = (foo::foo!(1), foo::foo!(2));
+ x;
+} //^ (i32, usize)
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! foo {
+ (1) => { $crate::bar!() };
+ (2) => { 1 + $crate::baz() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+pub fn baz() -> usize { 31usize }
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::Trait;
+
+fn test() {
+ let msg = foo::Message(foo::MessageRef);
+ let r = msg.deref();
+ r;
+ //^ &MessageRef
+}
+
+//- /lib.rs crate:foo
+pub struct MessageRef;
+pub struct Message(MessageRef);
+
+pub trait Trait {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+#[macro_export]
+macro_rules! expand {
+ () => {
+ impl Trait for Message {
+ type Target = $crate::MessageRef;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ }
+}
+
+expand!();
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_in_def_site() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::expand;
+
+macro_rules! list {
+ ($($tt:tt)*) => { $($tt)* }
+}
+
+fn test() {
+ let r = expand!();
+ r;
+ //^ u128
+}
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! expand {
+ () => { list!($crate::m!()) };
+}
+
+#[macro_export]
+macro_rules! m {
+ () => { 0u128 };
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_type_value_non_legacy_macro_use_as() {
+ check_infer(
+ r#"
+ mod m {
+ macro_rules! _foo {
+ ($x:ident) => { type $x = u64; }
+ }
+ pub(crate) use _foo as foo;
+ }
+
+ m::foo!(foo);
+ use foo as bar;
+ fn f() -> bar { 0 }
+ fn main() {
+ let _a = f();
+ }
+ "#,
+ expect![[r#"
+ 158..163 '{ 0 }': u64
+ 160..161 '0': u64
+ 174..196 '{ ...f(); }': ()
+ 184..186 '_a': u64
+ 190..191 'f': fn f() -> u64
+ 190..193 'f()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_macro() {
+ check_infer(
+ r#"
+ fn main() {
+ macro_rules! foo {
+ () => { 1usize }
+ }
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1usize': usize
+ 10..89 '{ ...!(); }': ()
+ 74..76 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_inner_macros() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = foo::foo!(1);
+ x;
+} //^ i32
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! foo {
+ (1) => { bar!() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_line() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! line {() => {}}
+
+ fn main() {
+ let x = line!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_file() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! file {() => {}}
+
+ fn main() {
+ let x = file!();
+ }
+ "#,
+ expect![[r#"
+ !0..2 '""': &str
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_column() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! column {() => {}}
+
+ fn main() {
+ let x = column!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 65..91 '{ ...!(); }': ()
+ 75..76 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!("hello", concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 65..121 '{ ...")); }': ()
+ 75..76 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ bar();
+} //^^^^^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_expression() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+fn main() {
+ let i = include!("bla.rs");
+ i;
+ //^ i32
+}
+//- /bla.rs
+0
+ "#,
+ )
+}
+
+#[test]
+fn infer_builtin_macros_include_child_mod() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("f/foo.rs");
+
+fn main() {
+ bar::bar();
+} //^^^^^^^^^^ u32
+
+//- /f/foo.rs
+pub mod bar;
+
+//- /f/bar.rs
+pub fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_str() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {() => {}}
+
+fn main() {
+ let a = include_str!("foo.rs");
+ a;
+} //^ &str
+
+//- /foo.rs
+hello
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_str_with_lazy_nested() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+#[rustc_builtin_macro]
+macro_rules! include_str {() => {}}
+
+macro_rules! m {
+ ($x:expr) => {
+ concat!("foo", $x)
+ };
+}
+
+fn main() {
+ let a = include_str!(m!(".rs"));
+ a;
+} //^ &str
+
+//- /foo.rs
+hello
+"#,
+ );
+}
+
+#[test]
+fn benchmark_include_macro() {
+ if skip_slow_tests() {
+ return;
+ }
+ let data = bench_fixture::big_struct();
+ let fixture = r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ RegisterBlock { };
+ //^^^^^^^^^^^^^^^^^ RegisterBlock
+}
+ "#;
+ let fixture = format!("{}\n//- /foo.rs\n{}", fixture, data);
+
+ {
+ let _b = bench("include macro");
+ check_types(&fixture);
+ }
+}
+
+#[test]
+fn infer_builtin_macros_include_concat() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+include!(concat!("f", "oo.rs"));
+
+fn main() {
+ bar();
+} //^^^^^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+
+fn main() {
+ bar();
+} //^^^^^ {unknown}
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_itself_should_failed() {
+ check_types(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("main.rs");
+
+fn main() {
+ 0;
+} //^ i32
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat_with_lazy() {
+ check_infer(
+ r#"
+ macro_rules! hello {() => {"hello"}}
+
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!(hello!(), concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 103..160 '{ ...")); }': ()
+ 113..114 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_env() {
+ check_infer(
+ r#"
+ //- /main.rs env:foo=bar
+ #[rustc_builtin_macro]
+ macro_rules! env {() => {}}
+
+ fn main() {
+ let x = env!("foo");
+ }
+ "#,
+ expect![[r#"
+ !0..22 '"__RA_...TED__"': &str
+ 62..90 '{ ...o"); }': ()
+ 72..73 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_derive_clone_simple() {
+ check_types(
+ r#"
+//- minicore: derive, clone
+#[derive(Clone)]
+struct S;
+fn test() {
+ S.clone();
+} //^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn infer_derive_clone_with_params() {
+ check_types(
+ r#"
+//- minicore: clone, derive
+#[derive(Clone)]
+struct S;
+#[derive(Clone)]
+struct Wrapper<T>(T);
+struct NonClone;
+fn test() {
+ let x = (Wrapper(S).clone(), Wrapper(NonClone).clone());
+ x;
+ //^ (Wrapper<S>, {unknown})
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_custom_derive_simple() {
+    // FIXME: this test currently does nothing
+ check_types(
+ r#"
+//- minicore: derive
+use foo::Foo;
+
+#[derive(Foo)]
+struct S{}
+
+fn test() {
+ S{};
+} //^^^ S
+"#,
+ );
+}
+
+#[test]
+fn macro_in_arm() {
+ check_infer(
+ r#"
+ macro_rules! unit {
+ () => { () };
+ }
+
+ fn main() {
+ let x = match () {
+ unit!() => 92u32,
+ };
+ }
+ "#,
+ expect![[r#"
+ !0..2 '()': ()
+ 51..110 '{ ... }; }': ()
+ 61..62 'x': u32
+ 65..107 'match ... }': u32
+ 71..73 '()': ()
+ 95..100 '92u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn macro_in_type_alias_position() {
+ check_infer(
+ r#"
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ trait Foo {
+ type Ty;
+ }
+
+ impl<T> Foo for T {
+ type Ty = U32!();
+ }
+
+ type TayTo = U32!();
+
+ fn testy() {
+ let a: <() as Foo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 147..196 '{ ...yTo; }': ()
+ 157..158 'a': u32
+ 185..186 'b': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_macro_in_type_alias_position() {
+ check_infer(
+ r#"
+ macro_rules! U32Inner2 {
+ () => { u32 };
+ }
+
+ macro_rules! U32Inner1 {
+ () => { U32Inner2!() };
+ }
+
+ macro_rules! U32 {
+ () => { U32Inner1!() };
+ }
+
+ trait Foo {
+ type Ty;
+ }
+
+ impl<T> Foo for T {
+ type Ty = U32!();
+ }
+
+ type TayTo = U32!();
+
+ fn testy() {
+ let a: <() as Foo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 259..308 '{ ...yTo; }': ()
+ 269..270 'a': u32
+ 297..298 'b': u32
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_alias_position_generics() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ trait Moo {
+ type Ty;
+ }
+
+ impl<T> Moo for T {
+ type Ty = Bar!();
+ }
+
+ type TayTo = Bar!();
+
+ fn main() {
+ let a: <() as Moo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 228..277 '{ ...yTo; }': ()
+ 238..239 'a': Foo<u32, u32>
+ 266..267 'b': Foo<u32, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_position() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ fn main() {
+ let a: Bar!();
+ }
+ "#,
+ expect![[r#"
+ 133..155 '{ ...!(); }': ()
+ 143..144 'a': Foo<u32, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn macros_in_type_generics() {
+ check_infer(
+ r#"
+ struct Foo<A, B>(A, B);
+
+ macro_rules! U32 {
+ () => { u32 };
+ }
+
+ macro_rules! Bar {
+ () => { Foo<U32!(), U32!()> };
+ }
+
+ trait Moo {
+ type Ty;
+ }
+
+ impl<T> Moo for T {
+ type Ty = Foo<Bar!(), Bar!()>;
+ }
+
+ type TayTo = Foo<Bar!(), U32!()>;
+
+ fn main() {
+ let a: <() as Moo>::Ty;
+ let b: TayTo;
+ }
+ "#,
+ expect![[r#"
+ 254..303 '{ ...yTo; }': ()
+ 264..265 'a': Foo<Foo<u32, u32>, Foo<u32, u32>>
+ 292..293 'b': Foo<Foo<u32, u32>, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infinitely_recursive_macro_type() {
+ check_infer(
+ r#"
+ struct Bar<T, X>(T, X);
+
+ macro_rules! Foo {
+ () => { Foo!() }
+ }
+
+ macro_rules! U32 {
+ () => { u32 }
+ }
+
+ type A = Foo!();
+ type B = Bar<Foo!(), U32!()>;
+
+ fn main() {
+ let a: A;
+ let b: B;
+ }
+ "#,
+ expect![[r#"
+ 166..197 '{ ...: B; }': ()
+ 176..177 'a': {unknown}
+ 190..191 'b': Bar<{unknown}, u32>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tails() {
+ check_infer_with_mismatches(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {}
+
+impl S {
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { } }
+ #[cfg(not(feature = "foo"))]
+ { Self { } }
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'bar': u32
+ 52..170 '{ ... }': S
+ 62..106 '#[cfg(... { } }': S
+ 96..104 'Self { }': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_1() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+#[mac::attr_macro]
+fn foo() {
+ let xxx = 1;
+ //^^^ i32
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_in_impl() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+struct Foo;
+impl Foo {
+ #[mac::attr_macro]
+ fn foo() {
+ let xxx = 1;
+ //^^^ i32
+ }
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
+
+#[test]
+fn infer_in_unexpandable_attr_proc_macro_in_trait() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:mac
+trait Foo {
+ #[mac::attr_macro]
+ fn foo() {
+ let xxx = 1;
+ //^^^ i32
+ }
+}
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+pub fn attr_macro() {}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
new file mode 100644
index 000000000..68463dc06
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -0,0 +1,1792 @@
+use expect_test::expect;
+
+use crate::tests::check;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_slice_method() {
+ check_types(
+ r#"
+impl<T> [T] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+
+fn test(x: &[u8]) {
+ <[_]>::foo(x);
+ //^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn cross_crate_primitive_method() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = 1f32;
+ x.foo();
+} //^^^^^^^ f32
+
+//- /lib.rs crate:other_crate
+mod foo {
+ impl f32 {
+ pub fn foo(self) -> f32 { 0. }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_array_inherent_impl() {
+ check_types(
+ r#"
+impl<T, const N: usize> [T; N] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+fn test(x: &[u8; 0]) {
+ <[_; 0]>::foo(x);
+ //^^^^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_associated_method_struct() {
+ check_infer(
+ r#"
+ struct A { x: u32 }
+
+ impl A {
+ fn new() -> A {
+ A { x: 0 }
+ }
+ }
+ fn test() {
+ let a = A::new();
+ a.x;
+ }
+ "#,
+ expect![[r#"
+ 48..74 '{ ... }': A
+ 58..68 'A { x: 0 }': A
+ 65..66 '0': u32
+ 87..121 '{ ...a.x; }': ()
+ 97..98 'a': A
+ 101..107 'A::new': fn new() -> A
+ 101..109 'A::new()': A
+ 115..116 'a': A
+ 115..118 'a.x': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_struct_in_local_scope() {
+ check_infer(
+ r#"
+ fn mismatch() {
+ struct A;
+
+ impl A {
+ fn from(_: i32, _: i32) -> Self {
+ A
+ }
+ }
+
+ let _a = A::from(1, 2);
+ }
+ "#,
+ expect![[r#"
+ 14..146 '{ ... 2); }': ()
+ 125..127 '_a': A
+ 130..137 'A::from': fn from(i32, i32) -> A
+ 130..143 'A::from(1, 2)': A
+ 138..139 '1': i32
+ 141..142 '2': i32
+ 60..61 '_': i32
+ 68..69 '_': i32
+ 84..109 '{ ... }': A
+ 98..99 'A': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_enum() {
+ check_infer(
+ r#"
+ enum A { B, C }
+
+ impl A {
+ pub fn b() -> A {
+ A::B
+ }
+ pub fn c() -> A {
+ A::C
+ }
+ }
+ fn test() {
+ let a = A::b();
+ a;
+ let c = A::c();
+ c;
+ }
+ "#,
+ expect![[r#"
+ 46..66 '{ ... }': A
+ 56..60 'A::B': A
+ 87..107 '{ ... }': A
+ 97..101 'A::C': A
+ 120..177 '{ ... c; }': ()
+ 130..131 'a': A
+ 134..138 'A::b': fn b() -> A
+ 134..140 'A::b()': A
+ 146..147 'a': A
+ 157..158 'c': A
+ 161..165 'A::c': fn c() -> A
+ 161..167 'A::c()': A
+ 173..174 'c': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_with_modules() {
+ check_infer(
+ r#"
+ mod a {
+ struct A;
+ impl A { pub fn thing() -> A { A {} }}
+ }
+
+ mod b {
+ struct B;
+ impl B { pub fn thing() -> u32 { 99 }}
+
+ mod c {
+ struct C;
+ impl C { pub fn thing() -> C { C {} }}
+ }
+ }
+ use b::c;
+
+ fn test() {
+ let x = a::A::thing();
+ let y = b::B::thing();
+ let z = c::C::thing();
+ }
+ "#,
+ expect![[r#"
+ 55..63 '{ A {} }': A
+ 57..61 'A {}': A
+ 125..131 '{ 99 }': u32
+ 127..129 '99': u32
+ 201..209 '{ C {} }': C
+ 203..207 'C {}': C
+ 240..324 '{ ...g(); }': ()
+ 250..251 'x': A
+ 254..265 'a::A::thing': fn thing() -> A
+ 254..267 'a::A::thing()': A
+ 277..278 'y': u32
+ 281..292 'b::B::thing': fn thing() -> u32
+ 281..294 'b::B::thing()': u32
+ 304..305 'z': C
+ 308..319 'c::C::thing': fn thing() -> C
+ 308..321 'c::C::thing()': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make(val: T) -> Gen<T> {
+ Gen { val }
+ }
+ }
+
+ fn test() {
+ let a = Gen::make(0u32);
+ }
+ "#,
+ expect![[r#"
+ 63..66 'val': T
+ 81..108 '{ ... }': Gen<T>
+ 91..102 'Gen { val }': Gen<T>
+ 97..100 'val': T
+ 122..154 '{ ...32); }': ()
+ 132..133 'a': Gen<u32>
+ 136..145 'Gen::make': fn make<u32>(u32) -> Gen<u32>
+ 136..151 'Gen::make(0u32)': Gen<u32>
+ 146..150 '0u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make() -> Gen<T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32>::make();
+ }
+ "#,
+ expect![[r#"
+ 75..99 '{ ... }': Gen<T>
+ 85..93 'loop { }': !
+ 90..93 '{ }': ()
+ 113..148 '{ ...e(); }': ()
+ 123..124 'a': Gen<u32>
+ 127..143 'Gen::<...::make': fn make<u32>() -> Gen<u32>
+ 127..145 'Gen::<...make()': Gen<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_2_type_params_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T, U> {
+ val: T,
+ val2: U,
+ }
+
+ impl<T> Gen<u32, T> {
+ pub fn make() -> Gen<u32,T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32, u64>::make();
+ }
+ "#,
+ expect![[r#"
+ 101..125 '{ ... }': Gen<u32, T>
+ 111..119 'loop { }': !
+ 116..119 '{ }': ()
+ 139..179 '{ ...e(); }': ()
+ 149..150 'a': Gen<u32, u64>
+ 153..174 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
+ 153..176 'Gen::<...make()': Gen<u32, u64>
+ "#]],
+ );
+}
+
+#[test]
+fn cross_crate_associated_method_call() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = other_crate::foo::S::thing();
+ x;
+} //^ i128
+
+//- /lib.rs crate:other_crate
+pub mod foo {
+ pub struct S;
+ impl S {
+ pub fn thing() -> i128 { 0 }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_trait_method_simple() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait1 {
+ fn method(&self) -> u32;
+}
+struct S1;
+impl Trait1 for S1 {}
+trait Trait2 {
+ fn method(&self) -> i128;
+}
+struct S2;
+impl Trait2 for S2 {}
+fn test() {
+ S1.method();
+ //^^^^^^^^^^^ u32
+ S2.method(); // -> i128
+ //^^^^^^^^^^^ i128
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_scoped() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+struct S;
+mod foo {
+ pub trait Trait1 {
+ fn method(&self) -> u32;
+ }
+ impl Trait1 for super::S {}
+}
+mod bar {
+ pub trait Trait2 {
+ fn method(&self) -> i128;
+ }
+ impl Trait2 for super::S {}
+}
+
+mod foo_test {
+ use super::S;
+ use super::foo::Trait1;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+ }
+}
+
+mod bar_test {
+ use super::S;
+ use super::bar::Trait2;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ i128
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_1() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S;
+impl Trait<u32> for S {}
+fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_more_params() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T1, T2, T3> {
+ fn method1(&self) -> (T1, T2, T3);
+ fn method2(&self) -> (T3, T2, T1);
+}
+struct S1;
+impl Trait<u8, u16, u32> for S1 {}
+struct S2;
+impl<T> Trait<i8, i16, T> for S2 {}
+fn test() {
+ S1.method1();
+ //^^^^^^^^^^^^ (u8, u16, u32)
+ S1.method2();
+ //^^^^^^^^^^^^ (u32, u16, u8)
+ S2.method1();
+ //^^^^^^^^^^^^ (i8, i16, {unknown})
+ S2.method2();
+ //^^^^^^^^^^^^ ({unknown}, i16, i8)
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_2() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn test() {
+ S(1u32).method();
+ //^^^^^^^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method() {
+ check_infer(
+ r#"
+ trait Default {
+ fn default() -> Self;
+ }
+ struct S;
+ impl Default for S {}
+ fn test() {
+ let s1: S = Default::default();
+ let s2 = S::default();
+ let s3 = <S as Default>::default();
+ }
+ "#,
+ expect![[r#"
+ 86..192 '{ ...t(); }': ()
+ 96..98 's1': S
+ 104..120 'Defaul...efault': fn default<S>() -> S
+ 104..122 'Defaul...ault()': S
+ 132..134 's2': S
+ 137..147 'S::default': fn default<S>() -> S
+ 137..149 'S::default()': S
+ 159..161 's3': S
+ 164..187 '<S as ...efault': fn default<S>() -> S
+ 164..189 '<S as ...ault()': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_1() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> T;
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make();
+ let b = G::<u64>::make();
+ let c: f64 = G::make();
+ }
+ "#,
+ expect![[r#"
+ 126..210 '{ ...e(); }': ()
+ 136..137 'a': u32
+ 140..147 'S::make': fn make<S, u32>() -> u32
+ 140..149 'S::make()': u32
+ 159..160 'b': u64
+ 163..177 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
+ 163..179 'G::<u6...make()': u64
+ 189..190 'c': f64
+ 198..205 'G::make': fn make<G<f64>, f64>() -> f64
+ 198..207 'G::make()': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (T, U);
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make::<i64>();
+ let b: (_, i64) = S::make();
+ let c = G::<u32>::make::<i64>();
+ let d: (u32, _) = G::make::<i64>();
+ let e: (u32, i64) = G::make();
+ }
+ "#,
+ expect![[r#"
+ 134..312 '{ ...e(); }': ()
+ 144..145 'a': (u32, i64)
+ 148..162 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
+ 148..164 'S::mak...i64>()': (u32, i64)
+ 174..175 'b': (u32, i64)
+ 188..195 'S::make': fn make<S, u32, i64>() -> (u32, i64)
+ 188..197 'S::make()': (u32, i64)
+ 207..208 'c': (u32, i64)
+ 211..232 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 211..234 'G::<u3...i64>()': (u32, i64)
+ 244..245 'd': (u32, i64)
+ 258..272 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 258..274 'G::mak...i64>()': (u32, i64)
+ 284..285 'e': (u32, i64)
+ 300..307 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 300..309 'G::make()': (u32, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_3() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<i32> {}
+ fn test() {
+ let a = S::make();
+ }
+ "#,
+ expect![[r#"
+ 100..126 '{ ...e(); }': ()
+ 110..111 'a': (S<i32>, i64)
+ 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
+ 114..123 'S::make()': (S<i32>, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_4() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ impl Trait<i32> for S<u32> {}
+ fn test() {
+ let a: (S<u64>, _) = S::make();
+ let b: (_, i32) = S::make();
+ }
+ "#,
+ expect![[r#"
+ 130..202 '{ ...e(); }': ()
+ 140..141 'a': (S<u64>, i64)
+ 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
+ 157..166 'S::make()': (S<u64>, i64)
+ 176..177 'b': (S<u32>, i32)
+ 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
+ 190..199 'S::make()': (S<u32>, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_5() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (Self, T, U);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ fn test() {
+ let a = <S as Trait<i64>>::make::<u8>();
+ let b: (S<u64>, _, _) = Trait::<i64>::make::<u8>();
+ }
+ "#,
+ expect![[r#"
+ 106..210 '{ ...>(); }': ()
+ 116..117 'a': (S<u64>, i64, u8)
+ 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
+ 161..162 'b': (S<u64>, i64, u8)
+ 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_1() {
+ check_infer(
+ r#"
+ trait Trait {
+ fn method(&self) -> u32;
+ }
+ fn test<T: Trait>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 63..64 't': T
+ 69..88 '{ ...d(); }': ()
+ 75..76 't': T
+ 75..85 't.method()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ fn test<U, T: Trait<U>>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 70..71 't': T
+ 76..95 '{ ...d(); }': ()
+ 82..83 't': T
+ 82..92 't.method()': U
+ "#]],
+ );
+}
+
+#[test]
+fn infer_with_multiple_trait_impls() {
+ check_infer(
+ r#"
+ trait Into<T> {
+ fn into(self) -> T;
+ }
+ struct S;
+ impl Into<u32> for S {}
+ impl Into<u64> for S {}
+ fn test() {
+ let x: u32 = S.into();
+ let y: u64 = S.into();
+ let z = Into::<u64>::into(S);
+ }
+ "#,
+ expect![[r#"
+ 28..32 'self': Self
+ 110..201 '{ ...(S); }': ()
+ 120..121 'x': u32
+ 129..130 'S': S
+ 129..137 'S.into()': u32
+ 147..148 'y': u64
+ 156..157 'S': S
+ 156..164 'S.into()': u64
+ 174..175 'z': u64
+ 178..195 'Into::...::into': fn into<S, u64>(S) -> u64
+ 178..198 'Into::...nto(S)': u64
+ 196..197 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_unify_impl_self_type() {
+ check_types(
+ r#"
+struct S<T>;
+impl S<u32> { fn foo(&self) -> u8 { 0 } }
+impl S<i32> { fn foo(&self) -> i8 { 0 } }
+fn test() { (S::<u32>.foo(), S::<i32>.foo()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (u8, i8)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoref() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_by_value_before_autoref() {
+ check_types(
+ r#"
+trait Clone { fn clone(&self) -> Self; }
+struct S;
+impl Clone for S {}
+impl Clone for &S {}
+fn test() { (S.clone(), (&S).clone(), (&&S).clone()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (S, S, &S)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_ref_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_unsize_array() {
+ check_types(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^^^^^^^ usize
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_from_prelude() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+struct S;
+impl Clone for S {}
+
+fn test() {
+ S.clone();
+ //^^^^^^^^^ S
+}
+
+//- /lib.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub trait Clone {
+ fn clone(&self) -> Self;
+ }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_for_unknown_trait() {
+ // The blanket impl currently applies because we ignore the unresolved where clause
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: UnknownTrait {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ // This is also to make sure that we don't resolve to the foo method just
+ // because that's the only method named foo we can find, which would make
+ // the below tests not work
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: Clone {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T: Clone> Trait for T {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_2() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U> Into<U> for T where U: From<T> {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U: From<T>> Into<U> for T {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_method() {
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub fn new(foo_: T) -> Self {
+ Wrapper(Foo(foo_))
+ }
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub fn new(bar_: T) -> Self {
+ Wrapper(Bar(bar_))
+ }
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::new(1.0);
+ let b = Wrapper::<Bar<f32>>::new(1.0);
+ (a, b);
+ //^^^^^^ (Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_const() {
+ cov_mark::check!(const_candidate_self_type_mismatch);
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub const VALUE: Foo<T>;
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub const VALUE: Bar<T>;
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::VALUE;
+ let b = Wrapper::<Bar<f32>>::VALUE;
+ (a, b);
+ //^^^^^^ (Foo<f32>, Bar<f32>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_encountering_fn_type() {
+ check_types(
+ r#"
+//- /main.rs
+fn foo() {}
+trait FnOnce { fn call(self); }
+fn test() { foo.call(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn super_trait_impl_return_trait_method_resolution() {
+ check_infer(
+ r#"
+ //- minicore: sized
+ trait Base {
+ fn foo(self) -> usize;
+ }
+
+ trait Super : Base {}
+
+ fn base1() -> impl Base { loop {} }
+ fn super1() -> impl Super { loop {} }
+
+ fn test(base2: impl Base, super2: impl Super) {
+ base1().foo();
+ super1().foo();
+ base2.foo();
+ super2.foo();
+ }
+ "#,
+ expect![[r#"
+ 24..28 'self': Self
+ 90..101 '{ loop {} }': !
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 128..139 '{ loop {} }': !
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 149..154 'base2': impl Base
+ 167..173 'super2': impl Super
+ 187..264 '{ ...o(); }': ()
+ 193..198 'base1': fn base1() -> impl Base
+ 193..200 'base1()': impl Base
+ 193..206 'base1().foo()': usize
+ 212..218 'super1': fn super1() -> impl Super
+ 212..220 'super1()': impl Super
+ 212..226 'super1().foo()': usize
+ 232..237 'base2': impl Base
+ 232..243 'base2.foo()': usize
+ 249..255 'super2': impl Super
+ 249..261 'super2.foo()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_non_parameter_type() {
+ check_types(
+ r#"
+mod a {
+ pub trait Foo {
+ fn foo(&self);
+ }
+}
+
+struct Wrapper<T>(T);
+fn foo<T>(t: Wrapper<T>)
+where
+ Wrapper<T>: a::Foo,
+{
+ t.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_3373() {
+ check_types(
+ r#"
+struct A<T>(T);
+
+impl A<i32> {
+ fn from(v: i32) -> A<i32> { A(v) }
+}
+
+fn main() {
+ A::from(3);
+} //^^^^^^^^^^ A<i32>
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_slow() {
+ // this can get quite slow if we set the solver size limit too high
+ check_types(
+ r#"
+trait SendX {}
+
+struct S1; impl SendX for S1 {}
+struct S2; impl SendX for S2 {}
+struct U1;
+
+trait Trait { fn method(self); }
+
+struct X1<A, B> {}
+impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
+
+struct S<B, C> {}
+
+trait FnX {}
+
+impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
+
+fn test() { (S {}).method(); }
+ //^^^^^^^^^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_super_trait_not_in_scope() {
+ check_infer(
+ r#"
+ mod m {
+ pub trait SuperTrait {
+ fn foo(&self) -> u32 { 0 }
+ }
+ }
+ trait Trait: m::SuperTrait {}
+
+ struct S;
+ impl m::SuperTrait for S {}
+ impl Trait for S {}
+
+ fn test(d: &dyn Trait) {
+ d.foo();
+ }
+ "#,
+ expect![[r#"
+ 51..55 'self': &Self
+ 64..69 '{ 0 }': u32
+ 66..67 '0': u32
+ 176..177 'd': &dyn Trait
+ 191..207 '{ ...o(); }': ()
+ 197..198 'd': &dyn Trait
+ 197..204 'd.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_foreign_opaque_type() {
+ check_infer(
+ r#"
+extern "C" {
+ type S;
+ fn f() -> &'static S;
+}
+
+impl S {
+ fn foo(&self) -> bool {
+ true
+ }
+}
+
+fn test() {
+ let s = unsafe { f() };
+ s.foo();
+}
+"#,
+ expect![[r#"
+ 75..79 'self': &S
+ 89..109 '{ ... }': bool
+ 99..103 'true': bool
+ 123..167 '{ ...o(); }': ()
+ 133..134 's': &S
+ 137..151 'unsafe { f() }': &S
+ 137..151 'unsafe { f() }': &S
+ 146..147 'f': fn f() -> &S
+ 146..149 'f()': &S
+ 157..158 's': &S
+ 157..164 's.foo()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn method_with_allocator_box_self_type() {
+ check_types(
+ r#"
+struct Slice<T> {}
+struct Box<T, A> {}
+
+impl<T> Slice<T> {
+ pub fn into_vec<A>(self: Box<Self, A>) { }
+}
+
+fn main() {
+ let foo: Slice<u32>;
+ foo.into_vec(); // we shouldn't crash on this at least
+} //^^^^^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_on_dyn_impl() {
+ check_types(
+ r#"
+trait Foo {}
+
+impl Foo for u32 {}
+impl dyn Foo + '_ {
+ pub fn dyn_foo(&self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let f = &42u32 as &dyn Foo;
+ f.dyn_foo();
+ // ^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn autoderef_visibility_field() {
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().0;
+ // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
+ // ^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn autoderef_visibility_method() {
+ cov_mark::check!(autoderef_candidate_not_visible);
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ impl Foo {
+ pub fn mango(&self) -> char {
+ self.0
+ }
+ }
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ fn mango(&self) -> i32 {
+ self.0
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().mango();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_vs_private_inherent_const() {
+ cov_mark::check!(const_candidate_not_visible);
+ check(
+ r#"
+mod a {
+ pub struct Foo;
+ impl Foo {
+ const VALUE: u32 = 2;
+ }
+ pub trait Trait {
+ const VALUE: usize;
+ }
+ impl Trait for Foo {
+ const VALUE: usize = 3;
+ }
+
+ fn foo() {
+ let x = Foo::VALUE;
+ // ^^^^^^^^^^ type: u32
+ }
+}
+use a::Trait;
+fn foo() {
+ let x = a::Foo::VALUE;
+ // ^^^^^^^^^^^^^ type: usize
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_impl_in_synstructure_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _DERIVE_Tr_: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn inherent_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+const _: () = {
+ impl S {
+ fn method(&self) -> u16 { 0 }
+
+ pub(super) fn super_method(&self) -> u16 { 0 }
+
+ pub(crate) fn crate_method(&self) -> u16 { 0 }
+
+ pub fn pub_method(&self) -> u16 { 0 }
+ }
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+
+ S.super_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.crate_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.pub_method();
+ //^^^^^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_array_methods() {
+ check_types(
+ r#"
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn map<F, U>(self, f: F) -> [U; N]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn map<F, U>(self, f: F) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f() {
+ let v = [1, 2].map::<_, usize>(|x| -> x * 2);
+ v;
+ //^ [usize; 2]
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_method() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f<const C: usize, P>() {
+ let v = [1, 2].my_map::<_, (), 12>(|x| -> x * 2, Const::<12>);
+ v;
+ //^ [(); 12]
+ let v = [1, 2].my_map::<_, P, C>(|x| -> x * 2, Const::<C>);
+ v;
+ //^ [P; C]
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_type_alias() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+type U2 = Const<2>;
+type U5 = Const<5>;
+
+impl U2 {
+ fn f(self) -> Const<12> {
+ loop {}
+ }
+}
+
+impl U5 {
+ fn f(self) -> Const<15> {
+ loop {}
+ }
+}
+
+fn f(x: U2) {
+ let y = x.f();
+ //^ Const<12>
+}
+ "#,
+ );
+}
+
+#[test]
+fn skip_array_during_method_dispatch() {
+ check_types(
+ r#"
+//- /main2018.rs crate:main2018 deps:core
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ &i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /main2021.rs crate:main2021 deps:core edition:2021
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /core.rs crate:core
+#[rustc_skip_array_during_method_dispatch]
+pub trait IntoIterator {
+ type Out;
+ fn into_iter(self) -> Self::Out;
+}
+
+impl<T> IntoIterator for [T; 1] {
+ type Out = T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+impl<'a, T> IntoIterator for &'a [T] {
+ type Out = &'a T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+ "#,
+ );
+}
+
+#[test]
+fn sized_blanket_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Foo { fn foo() -> u8; }
+impl<T: Sized> Foo for T {}
+fn f<S: Sized, T, U: ?Sized>() {
+ u32::foo;
+ S::foo;
+ T::foo;
+ U::foo;
+ <[u32]>::foo;
+}
+"#,
+ expect![[r#"
+ 89..160 '{ ...foo; }': ()
+ 95..103 'u32::foo': fn foo<u32>() -> u8
+ 109..115 'S::foo': fn foo<S>() -> u8
+ 121..127 'T::foo': fn foo<T>() -> u8
+ 133..139 'U::foo': {unknown}
+ 145..157 '<[u32]>::foo': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn local_impl() {
+ check_types(
+ r#"
+fn main() {
+ struct SomeStruct(i32);
+
+ impl SomeStruct {
+ fn is_even(&self) -> bool {
+ self.0 % 2 == 0
+ }
+ }
+
+ let o = SomeStruct(3);
+ let is_even = o.is_even();
+ // ^^^^^^^ bool
+}
+ "#,
+ );
+}
+
+#[test]
+fn deref_fun_1() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), make());
+ let _: usize = (*a1).0;
+ a1;
+ //^^ A<B<usize>, u32>
+
+ let a2 = A(make(), make());
+ a2.thing();
+ //^^^^^^^^^^ isize
+ a2;
+ //^^ A<B<isize>, u32>
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_fun_2() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T> core::ops::Deref for A<C<T>, i32> {
+ type Target = C<T>;
+ fn deref(&self) -> &C<T> { &self.0 }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), 1u32);
+ a1.thing();
+ a1;
+ //^^ A<B<isize>, u32>
+
+ let a2 = A(make(), 1i32);
+ let _: &str = a2.thing();
+ a2;
+ //^^ A<C<&str>, i32>
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_autoref() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn test() {
+ Foo.foo();
+ //^^^ adjustments: Borrow(Ref(Not))
+ (&Foo).foo();
+ // ^^^^ adjustments: ,
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_unsize_array() {
+ // FIXME not quite correct
+ check(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not))
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: sized
+pub trait Into<T>: Sized {
+ fn into(self) -> T;
+}
+impl<T> Into<T> for T {
+ fn into(self) -> T { self }
+}
+
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test(generic_args: impl Into<Foo>) {
+ let generic_args = generic_args.into();
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test() {
+ let generic_args;
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn resolve_minicore_iterator() {
+ check_types(
+ r#"
+//- minicore: iterators, sized
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Option<i32>
+"#,
+ );
+}
+
+#[test]
+fn primitive_assoc_fn_shadowed_by_use() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::u16;
+
+fn f() -> u16 {
+ let x = u16::from_le_bytes();
+ x
+ //^ u16
+}
+
+//- /core.rs crate:core
+pub mod u16 {}
+
+impl u16 {
+ pub fn from_le_bytes() -> Self { 0 }
+}
+ "#,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
new file mode 100644
index 000000000..fbdc8209f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
@@ -0,0 +1,485 @@
+use expect_test::expect;
+
+use super::{check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn infer_never1() {
+ check_types(
+ r#"
+fn test() {
+ let t = return;
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never2() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { a } else { loop {} };
+ a;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never3() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ !
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_in_generic_args() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+} //^ Option<!>
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred1() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ ()
+ if false { a };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred2() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<i32>
+ match 42 {
+ 42 => a,
+ _ => Option::Some(42),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred3() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<&str>
+ match 42 {
+ 42 => a,
+ _ => Option::Some("str"),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_no_arm() {
+ check_types(
+ r#"
+enum Void {}
+
+fn test(a: Void) {
+ let t = match a {};
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_unknown_arm() {
+ check_types(
+ r#"
+fn test(a: Option) {
+ let t = match 0 {
+ _ => unknown,
+ };
+ t;
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn if_never() {
+ check_types(
+ r#"
+fn test() {
+ let i = if true {
+ loop {}
+ } else {
+ 3.0
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn if_else_never() {
+ check_types(
+ r#"
+fn test(input: bool) {
+ let i = if input {
+ 2.0
+ } else {
+ return
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_first_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => return,
+ 2 => 2.0,
+ 3 => loop {},
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_second_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => 3.0,
+ 2 => loop {},
+ 3 => 3.0,
+ _ => return,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_all_arms_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => return,
+ _ => loop {},
+ };
+ i;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_no_never_arms() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => 2.0,
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn diverging_expression_1() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ let x: u32 = return;
+ }
+ fn test2() {
+ let x: u32 = { return; };
+ }
+ fn test3() {
+ let x: u32 = loop {};
+ }
+ fn test4() {
+ let x: u32 = { loop {} };
+ }
+ fn test5() {
+ let x: u32 = { if true { loop {}; } else { loop {}; } };
+ }
+ fn test6() {
+ let x: u32 = { let y: u32 = { loop {}; }; };
+ }
+ ",
+ expect![[r"
+ 11..39 '{ ...urn; }': ()
+ 21..22 'x': u32
+ 30..36 'return': !
+ 51..84 '{ ...; }; }': ()
+ 61..62 'x': u32
+ 70..81 '{ return; }': u32
+ 72..78 'return': !
+ 96..125 '{ ... {}; }': ()
+ 106..107 'x': u32
+ 115..122 'loop {}': !
+ 120..122 '{}': ()
+ 137..170 '{ ...} }; }': ()
+ 147..148 'x': u32
+ 156..167 '{ loop {} }': u32
+ 158..165 'loop {}': !
+ 163..165 '{}': ()
+ 182..246 '{ ...} }; }': ()
+ 192..193 'x': u32
+ 201..243 '{ if t...}; } }': u32
+ 203..241 'if tru... {}; }': u32
+ 206..210 'true': bool
+ 211..223 '{ loop {}; }': u32
+ 213..220 'loop {}': !
+ 218..220 '{}': ()
+ 229..241 '{ loop {}; }': u32
+ 231..238 'loop {}': !
+ 236..238 '{}': ()
+ 258..310 '{ ...; }; }': ()
+ 268..269 'x': u32
+ 277..307 '{ let ...; }; }': u32
+ 283..284 'y': u32
+ 292..304 '{ loop {}; }': u32
+ 294..301 'loop {}': !
+ 299..301 '{}': ()
+ "]],
+ );
+}
+
+#[test]
+fn diverging_expression_2() {
+ check_infer_with_mismatches(
+ r#"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop {}; "foo" };
+ }
+ "#,
+ expect![[r#"
+ 11..84 '{ ..." }; }': ()
+ 54..55 'x': u32
+ 63..81 '{ loop...foo" }': u32
+ 65..72 'loop {}': !
+ 70..72 '{}': ()
+ 74..79 '"foo"': &str
+ 74..79: expected u32, got &str
+ "#]],
+ );
+}
+
+#[test]
+fn diverging_expression_3_break() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop { break; } };
+ }
+ fn test2() {
+ // should give type mismatch
+ let x: u32 = { for a in b { break; }; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b {}; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b { return; }; };
+ }
+ fn test3() {
+ // should give type mismatch
+ let x: u32 = { while true { break; }; };
+ // should give type mismatch as well -- there's an implicit break, even if it's never hit
+ let x: u32 = { while true {}; };
+ // should give type mismatch as well
+ let x: u32 = { while true { return; }; };
+ }
+ ",
+ expect![[r#"
+ 11..85 '{ ...} }; }': ()
+ 54..55 'x': u32
+ 63..82 '{ loop...k; } }': u32
+ 65..80 'loop { break; }': ()
+ 70..80 '{ break; }': ()
+ 72..77 'break': !
+ 65..80: expected u32, got ()
+ 97..343 '{ ...; }; }': ()
+ 140..141 'x': u32
+ 149..175 '{ for ...; }; }': u32
+ 151..172 'for a ...eak; }': ()
+ 155..156 'a': {unknown}
+ 160..161 'b': {unknown}
+ 162..172 '{ break; }': ()
+ 164..169 'break': !
+ 226..227 'x': u32
+ 235..253 '{ for ... {}; }': u32
+ 237..250 'for a in b {}': ()
+ 241..242 'a': {unknown}
+ 246..247 'b': {unknown}
+ 248..250 '{}': ()
+ 304..305 'x': u32
+ 313..340 '{ for ...; }; }': u32
+ 315..337 'for a ...urn; }': ()
+ 319..320 'a': {unknown}
+ 324..325 'b': {unknown}
+ 326..337 '{ return; }': ()
+ 328..334 'return': !
+ 149..175: expected u32, got ()
+ 235..253: expected u32, got ()
+ 313..340: expected u32, got ()
+ 355..654 '{ ...; }; }': ()
+ 398..399 'x': u32
+ 407..433 '{ whil...; }; }': u32
+ 409..430 'while ...eak; }': ()
+ 415..419 'true': bool
+ 420..430 '{ break; }': ()
+ 422..427 'break': !
+ 537..538 'x': u32
+ 546..564 '{ whil... {}; }': u32
+ 548..561 'while true {}': ()
+ 554..558 'true': bool
+ 559..561 '{}': ()
+ 615..616 'x': u32
+ 624..651 '{ whil...; }; }': u32
+ 626..648 'while ...urn; }': ()
+ 632..636 'true': bool
+ 637..648 '{ return; }': ()
+ 639..645 'return': !
+ 407..433: expected u32, got ()
+ 546..564: expected u32, got ()
+ 624..651: expected u32, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn let_else_must_diverge() {
+ check_infer_with_mismatches(
+ r#"
+ fn f() {
+ let 1 = 2 else {
+ return;
+ };
+ }
+ "#,
+ expect![[r#"
+ 7..54 '{ ... }; }': ()
+ 17..18 '1': i32
+ 17..18 '1': i32
+ 21..22 '2': i32
+ 28..51 '{ ... }': !
+ 38..44 'return': !
+ "#]],
+ );
+ check_infer_with_mismatches(
+ r#"
+ fn f() {
+ let 1 = 2 else {};
+ }
+ "#,
+ expect![[r#"
+ 7..33 '{ ... {}; }': ()
+ 17..18 '1': i32
+ 17..18 '1': i32
+ 21..22 '2': i32
+ 28..30 '{}': !
+ 28..30: expected !, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_11837() {
+ check_no_mismatches(
+ r#"
+//- minicore: result
+enum MyErr {
+ Err1,
+ Err2,
+}
+
+fn example_ng() {
+ let value: Result<i32, MyErr> = Ok(3);
+
+ loop {
+ let ret = match value {
+ Ok(value) => value,
+ Err(ref err) => {
+ match err {
+ MyErr::Err1 => break,
+ MyErr::Err2 => continue,
+ };
+ }
+ };
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn issue_11814() {
+ check_no_mismatches(
+ r#"
+fn example() -> bool {
+ match 1 {
+ _ => return true,
+ };
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
new file mode 100644
index 000000000..399553356
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -0,0 +1,991 @@
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_types};
+
+#[test]
+fn infer_pattern() {
+ check_infer(
+ r#"
+ fn test(x: &i32) {
+ let y = x;
+ let &z = x;
+ let a = z;
+ let (c, d) = (1, "hello");
+
+ for (e, f) in some_iter {
+ let g = e;
+ }
+
+ if let [val] = opt {
+ let h = val;
+ }
+
+ if let x @ true = &true {}
+
+ let lambda = |a: u64, b, c: i32| { a + b; c };
+
+ let ref ref_to_x = x;
+ let mut mut_x = x;
+ let ref mut mut_ref_to_x = x;
+ let k = mut_ref_to_x;
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..400 '{ ...o_x; }': ()
+ 27..28 'y': &i32
+ 31..32 'x': &i32
+ 42..44 '&z': &i32
+ 43..44 'z': i32
+ 47..48 'x': &i32
+ 58..59 'a': i32
+ 62..63 'z': i32
+ 73..79 '(c, d)': (i32, &str)
+ 74..75 'c': i32
+ 77..78 'd': &str
+ 82..94 '(1, "hello")': (i32, &str)
+ 83..84 '1': i32
+ 86..93 '"hello"': &str
+ 101..151 'for (e... }': ()
+ 105..111 '(e, f)': ({unknown}, {unknown})
+ 106..107 'e': {unknown}
+ 109..110 'f': {unknown}
+ 115..124 'some_iter': {unknown}
+ 125..151 '{ ... }': ()
+ 139..140 'g': {unknown}
+ 143..144 'e': {unknown}
+ 157..204 'if let... }': ()
+ 160..175 'let [val] = opt': bool
+ 164..169 '[val]': [{unknown}]
+ 165..168 'val': {unknown}
+ 172..175 'opt': [{unknown}]
+ 176..204 '{ ... }': ()
+ 190..191 'h': {unknown}
+ 194..197 'val': {unknown}
+ 210..236 'if let...rue {}': ()
+ 213..233 'let x ... &true': bool
+ 217..225 'x @ true': &bool
+ 221..225 'true': bool
+ 221..225 'true': bool
+ 228..233 '&true': &bool
+ 229..233 'true': bool
+ 234..236 '{}': ()
+ 246..252 'lambda': |u64, u64, i32| -> i32
+ 255..287 '|a: u6...b; c }': |u64, u64, i32| -> i32
+ 256..257 'a': u64
+ 264..265 'b': u64
+ 267..268 'c': i32
+ 275..287 '{ a + b; c }': i32
+ 277..278 'a': u64
+ 277..282 'a + b': u64
+ 281..282 'b': u64
+ 284..285 'c': i32
+ 298..310 'ref ref_to_x': &&i32
+ 313..314 'x': &i32
+ 324..333 'mut mut_x': &i32
+ 336..337 'x': &i32
+ 347..367 'ref mu...f_to_x': &mut &i32
+ 370..371 'x': &i32
+ 381..382 'k': &mut &i32
+ 385..397 'mut_ref_to_x': &mut &i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literal_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn any<T>() -> T { loop {} }
+ fn test(x: &i32) {
+ if let "foo" = any() {}
+ if let 1 = any() {}
+ if let 1u32 = any() {}
+ if let 1f32 = any() {}
+ if let 1.0 = any() {}
+ if let true = any() {}
+ }
+ "#,
+ expect![[r#"
+ 17..28 '{ loop {} }': T
+ 19..26 'loop {}': !
+ 24..26 '{}': ()
+ 37..38 'x': &i32
+ 46..208 '{ ...) {} }': ()
+ 52..75 'if let...y() {}': ()
+ 55..72 'let "f... any()': bool
+ 59..64 '"foo"': &str
+ 59..64 '"foo"': &str
+ 67..70 'any': fn any<&str>() -> &str
+ 67..72 'any()': &str
+ 73..75 '{}': ()
+ 80..99 'if let...y() {}': ()
+ 83..96 'let 1 = any()': bool
+ 87..88 '1': i32
+ 87..88 '1': i32
+ 91..94 'any': fn any<i32>() -> i32
+ 91..96 'any()': i32
+ 97..99 '{}': ()
+ 104..126 'if let...y() {}': ()
+ 107..123 'let 1u... any()': bool
+ 111..115 '1u32': u32
+ 111..115 '1u32': u32
+ 118..121 'any': fn any<u32>() -> u32
+ 118..123 'any()': u32
+ 124..126 '{}': ()
+ 131..153 'if let...y() {}': ()
+ 134..150 'let 1f... any()': bool
+ 138..142 '1f32': f32
+ 138..142 '1f32': f32
+ 145..148 'any': fn any<f32>() -> f32
+ 145..150 'any()': f32
+ 151..153 '{}': ()
+ 158..179 'if let...y() {}': ()
+ 161..176 'let 1.0 = any()': bool
+ 165..168 '1.0': f64
+ 165..168 '1.0': f64
+ 171..174 'any': fn any<f64>() -> f64
+ 171..176 'any()': f64
+ 177..179 '{}': ()
+ 184..206 'if let...y() {}': ()
+ 187..203 'let tr... any()': bool
+ 191..195 'true': bool
+ 191..195 'true': bool
+ 198..201 'any': fn any<bool>() -> bool
+ 198..203 'any()': bool
+ 204..206 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_range_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn test(x: &i32) {
+ if let 1..76 = 2u32 {}
+ if let 1..=76 = 2u32 {}
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..75 '{ ...2 {} }': ()
+ 23..45 'if let...u32 {}': ()
+ 26..42 'let 1....= 2u32': bool
+ 30..35 '1..76': u32
+ 38..42 '2u32': u32
+ 43..45 '{}': ()
+ 50..73 'if let...u32 {}': ()
+ 53..70 'let 1....= 2u32': bool
+ 57..63 '1..=76': u32
+ 66..70 '2u32': u32
+ 71..73 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+
+ fn test() {
+ let A(n) = &A(1);
+ let A(n) = &mut A(1);
+ }
+ "#,
+ expect![[r#"
+ 27..78 '{ ...(1); }': ()
+ 37..41 'A(n)': A<i32>
+ 39..40 'n': &i32
+ 44..49 '&A(1)': &A<i32>
+ 45..46 'A': A<i32>(i32) -> A<i32>
+ 45..49 'A(1)': A<i32>
+ 47..48 '1': i32
+ 59..63 'A(n)': A<i32>
+ 61..62 'n': &mut i32
+ 66..75 '&mut A(1)': &mut A<i32>
+ 71..72 'A': A<i32>(i32) -> A<i32>
+ 71..75 'A(1)': A<i32>
+ 73..74 '1': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics_ref() {
+ cov_mark::check!(match_ergonomics_ref);
+ check_infer(
+ r#"
+ fn test() {
+ let v = &(1, &2);
+ let (_, &w) = v;
+ }
+ "#,
+ expect![[r#"
+ 10..56 '{ ...= v; }': ()
+ 20..21 'v': &(i32, &i32)
+ 24..32 '&(1, &2)': &(i32, &i32)
+ 25..32 '(1, &2)': (i32, &i32)
+ 26..27 '1': i32
+ 29..31 '&2': &i32
+ 30..31 '2': i32
+ 42..49 '(_, &w)': (i32, &i32)
+ 43..44 '_': i32
+ 46..48 '&w': &i32
+ 47..48 'w': i32
+ 52..53 'v': &(i32, &i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_slice() {
+ check_infer(
+ r#"
+ fn test() {
+ let slice: &[f64] = &[0.0];
+ match slice {
+ &[] => {},
+ &[a] => {
+ a;
+ },
+ &[b, c] => {
+ b;
+ c;
+ }
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..209 '{ ... } }': ()
+ 20..25 'slice': &[f64]
+ 36..42 '&[0.0]': &[f64; 1]
+ 37..42 '[0.0]': [f64; 1]
+ 38..41 '0.0': f64
+ 48..207 'match ... }': ()
+ 54..59 'slice': &[f64]
+ 70..73 '&[]': &[f64]
+ 71..73 '[]': [f64]
+ 77..79 '{}': ()
+ 89..93 '&[a]': &[f64]
+ 90..93 '[a]': [f64]
+ 91..92 'a': f64
+ 97..123 '{ ... }': ()
+ 111..112 'a': f64
+ 133..140 '&[b, c]': &[f64]
+ 134..140 '[b, c]': [f64]
+ 135..136 'b': f64
+ 138..139 'c': f64
+ 144..185 '{ ... }': ()
+ 158..159 'b': f64
+ 173..174 'c': f64
+ 194..195 '_': &[f64]
+ 199..201 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_string_literal() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..98 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..96 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 72..74 '{}': ()
+ 83..84 '_': &str
+ 88..90 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_or() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" | "world" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..108 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..106 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 61..78 '"hello...world"': &str
+ 71..78 '"world"': &str
+ 71..78 '"world"': &str
+ 82..84 '{}': ()
+ 93..94 '_': &str
+ 98..100 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_arr() {
+ check_infer(
+ r#"
+ fn test() {
+ let arr: [f64; 2] = [0.0, 1.0];
+ match arr {
+ [1.0, a] => {
+ a;
+ },
+ [b, c] => {
+ b;
+ c;
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..179 '{ ... } }': ()
+ 20..23 'arr': [f64; 2]
+ 36..46 '[0.0, 1.0]': [f64; 2]
+ 37..40 '0.0': f64
+ 42..45 '1.0': f64
+ 52..177 'match ... }': ()
+ 58..61 'arr': [f64; 2]
+ 72..80 '[1.0, a]': [f64; 2]
+ 73..76 '1.0': f64
+ 73..76 '1.0': f64
+ 78..79 'a': f64
+ 84..110 '{ ... }': ()
+ 98..99 'a': f64
+ 120..126 '[b, c]': [f64; 2]
+ 121..122 'b': f64
+ 124..125 'c': f64
+ 130..171 '{ ... }': ()
+ 144..145 'b': f64
+ 159..160 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_adt_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B
+ }
+
+ struct S(u32, E);
+
+ fn test() {
+ let e = E::A { x: 3 };
+
+ let S(y, z) = foo;
+ let E::A { x: new_var } = e;
+
+ match e {
+ E::A { x } => x,
+ E::B if foo => 1,
+ E::B => 10,
+ };
+
+ let ref d @ E::A { .. } = e;
+ d;
+ }
+ "#,
+ expect![[r#"
+ 67..288 '{ ... d; }': ()
+ 77..78 'e': E
+ 81..94 'E::A { x: 3 }': E
+ 91..92 '3': usize
+ 105..112 'S(y, z)': S
+ 107..108 'y': u32
+ 110..111 'z': E
+ 115..118 'foo': S
+ 128..147 'E::A {..._var }': E
+ 138..145 'new_var': usize
+ 150..151 'e': E
+ 158..244 'match ... }': usize
+ 164..165 'e': E
+ 176..186 'E::A { x }': E
+ 183..184 'x': usize
+ 190..191 'x': usize
+ 201..205 'E::B': E
+ 209..212 'foo': bool
+ 216..217 '1': usize
+ 227..231 'E::B': E
+ 235..237 '10': usize
+ 255..274 'ref d ...{ .. }': &E
+ 263..274 'E::A { .. }': E
+ 277..278 'e': E
+ 284..285 'd': &E
+ "#]],
+ );
+}
+
+#[test]
+fn enum_variant_through_self_in_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B(usize),
+ C
+ }
+
+ impl E {
+ fn test() {
+ match (loop {}) {
+ Self::A { x } => { x; },
+ Self::B(x) => { x; },
+ Self::C => {},
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 75..217 '{ ... }': ()
+ 85..210 'match ... }': ()
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 115..128 'Self::A { x }': E
+ 125..126 'x': usize
+ 132..138 '{ x; }': ()
+ 134..135 'x': usize
+ 152..162 'Self::B(x)': E
+ 160..161 'x': usize
+ 166..172 '{ x; }': ()
+ 168..169 'x': usize
+ 186..193 'Self::C': E
+ 197..199 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generics_in_patterns() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+
+ fn test(a1: A<u32>, o: Option<u64>) {
+ let A { x: x2 } = a1;
+ let A::<i64> { x: x3 } = A { x: 1 };
+ match o {
+ Option::Some(t) => t,
+ _ => 1,
+ };
+ }
+ "#,
+ expect![[r#"
+ 78..80 'a1': A<u32>
+ 90..91 'o': Option<u64>
+ 106..243 '{ ... }; }': ()
+ 116..127 'A { x: x2 }': A<u32>
+ 123..125 'x2': u32
+ 130..132 'a1': A<u32>
+ 142..160 'A::<i6...: x3 }': A<i64>
+ 156..158 'x3': i64
+ 163..173 'A { x: 1 }': A<i64>
+ 170..171 '1': i64
+ 179..240 'match ... }': u64
+ 185..186 'o': Option<u64>
+ 197..212 'Option::Some(t)': Option<u64>
+ 210..211 't': u64
+ 216..217 't': u64
+ 227..228 '_': Option<u64>
+ 232..233 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const_pattern() {
+ check(
+ r#"
+enum Option<T> { None }
+use Option::None;
+struct Foo;
+const Bar: usize = 1;
+
+fn test() {
+ let a: Option<u32> = None;
+ let b: Option<i64> = match a {
+ None => None,
+ };
+ let _: () = match () { Foo => () };
+ // ^^^ expected (), got Foo
+ let _: () = match () { Bar => () };
+ // ^^^ expected (), got usize
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_guard() {
+ check_infer(
+ r#"
+struct S;
+impl S { fn foo(&self) -> bool { false } }
+
+fn main() {
+ match S {
+ s if s.foo() => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 27..31 'self': &S
+ 41..50 '{ false }': bool
+ 43..48 'false': bool
+ 64..115 '{ ... } }': ()
+ 70..113 'match ... }': ()
+ 76..77 'S': S
+ 88..89 's': S
+ 93..94 's': S
+ 93..100 's.foo()': bool
+ 104..106 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn match_ergonomics_in_closure_params() {
+ check_infer(
+ r#"
+//- minicore: fn
+fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
+
+fn test() {
+ foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
+ foo(&(1, "a"), |(x, y)| x);
+}
+"#,
+ expect![[r#"
+ 32..33 't': T
+ 38..39 'f': F
+ 49..60 '{ loop {} }': U
+ 51..58 'loop {}': !
+ 56..58 '{}': ()
+ 72..171 '{ ... x); }': ()
+ 78..81 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
+ 78..105 'foo(&(...y)| x)': i32
+ 82..91 '&(1, "a")': &(i32, &str)
+ 83..91 '(1, "a")': (i32, &str)
+ 84..85 '1': i32
+ 87..90 '"a"': &str
+ 93..104 '|&(x, y)| x': |&(i32, &str)| -> i32
+ 94..101 '&(x, y)': &(i32, &str)
+ 95..101 '(x, y)': (i32, &str)
+ 96..97 'x': i32
+ 99..100 'y': &str
+ 103..104 'x': i32
+ 142..145 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
+ 142..168 'foo(&(...y)| x)': &i32
+ 146..155 '&(1, "a")': &(i32, &str)
+ 147..155 '(1, "a")': (i32, &str)
+ 148..149 '1': i32
+ 151..154 '"a"': &str
+ 157..167 '|(x, y)| x': |&(i32, &str)| -> &i32
+ 158..164 '(x, y)': (i32, &str)
+ 159..160 'x': &i32
+ 162..163 'y': &&str
+ 166..167 'x': &i32
+ "#]],
+ );
+}
+
+#[test]
+fn slice_tail_pattern() {
+ check_infer(
+ r#"
+ fn foo(params: &[i32]) {
+ match params {
+ [head, tail @ ..] => {
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[i32]
+ 23..92 '{ ... } }': ()
+ 29..90 'match ... }': ()
+ 35..41 'params': &[i32]
+ 52..69 '[head,... @ ..]': [i32]
+ 53..57 'head': &i32
+ 59..68 'tail @ ..': &[i32]
+ 66..68 '..': [i32]
+ 73..84 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn box_pattern() {
+ check_infer(
+ r#"
+ pub struct Global;
+ #[lang = "owned_box"]
+ pub struct Box<T, A = Global>(T);
+
+ fn foo(params: Box<i32>) {
+ match params {
+ box integer => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 83..89 'params': Box<i32, Global>
+ 101..155 '{ ... } }': ()
+ 107..153 'match ... }': ()
+ 113..119 'params': Box<i32, Global>
+ 130..141 'box integer': Box<i32, Global>
+ 134..141 'integer': i32
+ 145..147 '{}': ()
+ "#]],
+ );
+ check_infer(
+ r#"
+ #[lang = "owned_box"]
+ pub struct Box<T>(T);
+
+ fn foo(params: Box<i32>) {
+ match params {
+ box integer => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 52..58 'params': Box<i32>
+ 70..124 '{ ... } }': ()
+ 76..122 'match ... }': ()
+ 82..88 'params': Box<i32>
+ 99..110 'box integer': Box<i32>
+ 103..110 'integer': i32
+ 114..116 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_ellipsis_pattern() {
+ check_infer_with_mismatches(
+ r#"
+fn foo(tuple: (u8, i16, f32)) {
+ match tuple {
+ (.., b, c) => {},
+ (a, .., c) => {},
+ (a, b, ..) => {},
+ (a, b) => {/*too short*/}
+ (a, b, c, d) => {/*too long*/}
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 7..12 'tuple': (u8, i16, f32)
+ 30..224 '{ ... } }': ()
+ 36..222 'match ... }': ()
+ 42..47 'tuple': (u8, i16, f32)
+ 58..68 '(.., b, c)': (u8, i16, f32)
+ 63..64 'b': i16
+ 66..67 'c': f32
+ 72..74 '{}': ()
+ 84..94 '(a, .., c)': (u8, i16, f32)
+ 85..86 'a': u8
+ 92..93 'c': f32
+ 98..100 '{}': ()
+ 110..120 '(a, b, ..)': (u8, i16, f32)
+ 111..112 'a': u8
+ 114..115 'b': i16
+ 124..126 '{}': ()
+ 136..142 '(a, b)': (u8, i16)
+ 137..138 'a': u8
+ 140..141 'b': i16
+ 146..161 '{/*too short*/}': ()
+ 170..182 '(a, b, c, d)': (u8, i16, f32, {unknown})
+ 171..172 'a': u8
+ 174..175 'b': i16
+ 177..178 'c': f32
+ 180..181 'd': {unknown}
+ 186..200 '{/*too long*/}': ()
+ 209..210 '_': (u8, i16, f32)
+ 214..216 '{}': ()
+ 136..142: expected (u8, i16, f32), got (u8, i16)
+ 170..182: expected (u8, i16, f32), got (u8, i16, f32, {unknown})
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_ellipsis_pattern() {
+ check_infer(
+ r#"
+struct Tuple(u8, i16, f32);
+fn foo(tuple: Tuple) {
+ match tuple {
+ Tuple(.., b, c) => {},
+ Tuple(a, .., c) => {},
+ Tuple(a, b, ..) => {},
+ Tuple(a, b) => {/*too short*/}
+ Tuple(a, b, c, d) => {/*too long*/}
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 35..40 'tuple': Tuple
+ 49..268 '{ ... } }': ()
+ 55..266 'match ... }': ()
+ 61..66 'tuple': Tuple
+ 77..92 'Tuple(.., b, c)': Tuple
+ 87..88 'b': i16
+ 90..91 'c': f32
+ 96..98 '{}': ()
+ 108..123 'Tuple(a, .., c)': Tuple
+ 114..115 'a': u8
+ 121..122 'c': f32
+ 127..129 '{}': ()
+ 139..154 'Tuple(a, b, ..)': Tuple
+ 145..146 'a': u8
+ 148..149 'b': i16
+ 158..160 '{}': ()
+ 170..181 'Tuple(a, b)': Tuple
+ 176..177 'a': u8
+ 179..180 'b': i16
+ 185..200 '{/*too short*/}': ()
+ 209..226 'Tuple(... c, d)': Tuple
+ 215..216 'a': u8
+ 218..219 'b': i16
+ 221..222 'c': f32
+ 224..225 'd': {unknown}
+ 230..244 '{/*too long*/}': ()
+ 253..254 '_': Tuple
+ 258..260 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn const_block_pattern() {
+ check_infer(
+ r#"
+struct Foo(usize);
+fn foo(foo: Foo) {
+ match foo {
+ const { Foo(15 + 32) } => {},
+ _ => {}
+ }
+}"#,
+ expect![[r#"
+ 26..29 'foo': Foo
+ 36..115 '{ ... } }': ()
+ 42..113 'match ... }': ()
+ 48..51 'foo': Foo
+ 62..84 'const ... 32) }': Foo
+ 68..84 '{ Foo(... 32) }': Foo
+ 70..73 'Foo': Foo(usize) -> Foo
+ 70..82 'Foo(15 + 32)': Foo
+ 74..76 '15': usize
+ 74..81 '15 + 32': usize
+ 79..81 '32': usize
+ 88..90 '{}': ()
+ 100..101 '_': Foo
+ 105..107 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn macro_pat() {
+ check_types(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ bind;
+ //^^^^ u8
+ }
+ Enum::Variant2 => {}
+ }
+}
+ "#,
+ )
+}
+
+#[test]
+fn type_mismatch_in_or_pattern() {
+ check_infer_with_mismatches(
+ r#"
+fn main() {
+ match (false,) {
+ (true | (),) => {}
+ (() | true,) => {}
+ (_ | (),) => {}
+ (() | _,) => {}
+ }
+}
+"#,
+ expect![[r#"
+ 10..142 '{ ... } }': ()
+ 16..140 'match ... }': ()
+ 22..30 '(false,)': (bool,)
+ 23..28 'false': bool
+ 41..53 '(true | (),)': (bool,)
+ 42..46 'true': bool
+ 42..46 'true': bool
+ 42..51 'true | ()': bool
+ 49..51 '()': ()
+ 57..59 '{}': ()
+ 68..80 '(() | true,)': ((),)
+ 69..71 '()': ()
+ 69..78 '() | true': ()
+ 74..78 'true': bool
+ 74..78 'true': bool
+ 84..86 '{}': ()
+ 95..104 '(_ | (),)': (bool,)
+ 96..97 '_': bool
+ 96..102 '_ | ()': bool
+ 100..102 '()': ()
+ 108..110 '{}': ()
+ 119..128 '(() | _,)': ((),)
+ 120..122 '()': ()
+ 120..126 '() | _': ()
+ 125..126 '_': bool
+ 132..134 '{}': ()
+ 49..51: expected bool, got ()
+ 68..80: expected (bool,), got ((),)
+ 69..71: expected bool, got ()
+ 69..78: expected bool, got ()
+ 100..102: expected bool, got ()
+ 119..128: expected (bool,), got ((),)
+ 120..122: expected bool, got ()
+ 120..126: expected bool, got ()
+ "#]],
+ );
+}
+
+#[test]
+fn slice_pattern_correctly_handles_array_length() {
+ check_infer(
+ r#"
+fn main() {
+ let [head, middle @ .., tail, tail2] = [1, 2, 3, 4, 5];
+}
+ "#,
+ expect![[r#"
+ 10..73 '{ ... 5]; }': ()
+ 20..52 '[head,...tail2]': [i32; 5]
+ 21..25 'head': i32
+ 27..38 'middle @ ..': [i32; 2]
+ 36..38 '..': [i32; 2]
+ 40..44 'tail': i32
+ 46..51 'tail2': i32
+ 55..70 '[1, 2, 3, 4, 5]': [i32; 5]
+ 56..57 '1': i32
+ 59..60 '2': i32
+ 62..63 '3': i32
+ 65..66 '4': i32
+ 68..69 '5': i32
+ "#]],
+ );
+}
+
+#[test]
+fn pattern_lookup_in_value_ns() {
+ check_types(
+ r#"
+use self::Constructor::*;
+struct IntRange {
+ range: (),
+}
+enum Constructor {
+ IntRange(IntRange),
+}
+fn main() {
+ match Constructor::IntRange(IntRange { range: () }) {
+ IntRange(x) => {
+ x;
+ //^ IntRange
+ }
+ Constructor::IntRange(x) => {
+ x;
+ //^ IntRange
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn if_let_guards() {
+ check_types(
+ r#"
+fn main() {
+ match (0,) {
+ opt if let (x,) = opt => {
+ x;
+ //^ i32
+ }
+ _ => {}
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn tuple_wildcard() {
+ check_types(
+ r#"
+fn main() {
+ enum Option<T> {Some(T), None}
+ use Option::*;
+
+ let mut x = None;
+ x;
+ //^ Option<(i32, i32)>
+
+ if let Some((_, _a)) = x {}
+
+ x = Some((1, 2));
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
new file mode 100644
index 000000000..93a88ab58
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -0,0 +1,1650 @@
+use expect_test::expect;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn bug_484() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = if true {};
+ }
+ "#,
+ expect![[r#"
+ 10..37 '{ ... {}; }': ()
+ 20..21 'x': ()
+ 24..34 'if true {}': ()
+ 27..31 'true': bool
+ 32..34 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn no_panic_on_field_of_enum() {
+ check_infer(
+ r#"
+ enum X {}
+
+ fn test(x: X) {
+ x.some_field;
+ }
+ "#,
+ expect![[r#"
+ 19..20 'x': X
+ 25..46 '{ ...eld; }': ()
+ 31..32 'x': X
+ 31..43 'x.some_field': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn bug_585() {
+ check_infer(
+ r#"
+ fn test() {
+ X {};
+ match x {
+ A::B {} => (),
+ A::Y() => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..88 '{ ... } }': ()
+ 16..20 'X {}': {unknown}
+ 26..86 'match ... }': ()
+ 32..33 'x': {unknown}
+ 44..51 'A::B {}': {unknown}
+ 55..57 '()': ()
+ 67..73 'A::Y()': {unknown}
+ 77..79 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn bug_651() {
+ check_infer(
+ r#"
+ fn quux() {
+ let y = 92;
+ 1 + y;
+ }
+ "#,
+ expect![[r#"
+ 10..40 '{ ...+ y; }': ()
+ 20..21 'y': i32
+ 24..26 '92': i32
+ 32..33 '1': i32
+ 32..37 '1 + y': i32
+ 36..37 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars() {
+ check_infer(
+ r#"
+ fn test() {
+ let y = unknown;
+ [y, &y];
+ }
+ "#,
+ expect![[r#"
+ 10..47 '{ ...&y]; }': ()
+ 20..21 'y': {unknown}
+ 24..31 'unknown': {unknown}
+ 37..44 '[y, &y]': [{unknown}; 2]
+ 38..39 'y': {unknown}
+ 41..43 '&y': &{unknown}
+ 42..43 'y': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars_2() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = unknown;
+ let y = unknown;
+ [(x, y), (&y, &x)];
+ }
+ "#,
+ expect![[r#"
+ 10..79 '{ ...x)]; }': ()
+ 20..21 'x': &{unknown}
+ 24..31 'unknown': &{unknown}
+ 41..42 'y': {unknown}
+ 45..52 'unknown': {unknown}
+ 58..76 '[(x, y..., &x)]': [(&{unknown}, {unknown}); 2]
+ 59..65 '(x, y)': (&{unknown}, {unknown})
+ 60..61 'x': &{unknown}
+ 63..64 'y': {unknown}
+ 67..75 '(&y, &x)': (&{unknown}, {unknown})
+ 68..70 '&y': &{unknown}
+ 69..70 'y': {unknown}
+ 72..74 '&x': &&{unknown}
+ 73..74 'x': &{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn array_elements_expected_type() {
+ check_no_mismatches(
+ r#"
+ fn test() {
+ let x: [[u32; 2]; 2] = [[1, 2], [3, 4]];
+ }
+ "#,
+ );
+}
+
+#[test]
+fn infer_std_crash_1() {
+ // caused stack overflow, taken from std
+ check_infer(
+ r#"
+ enum Maybe<T> {
+ Real(T),
+ Fake,
+ }
+
+ fn write() {
+ match something_unknown {
+ Maybe::Real(ref mut something) => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 53..138 '{ ... } }': ()
+ 59..136 'match ... }': ()
+ 65..82 'someth...nknown': Maybe<{unknown}>
+ 93..123 'Maybe:...thing)': Maybe<{unknown}>
+ 105..122 'ref mu...ething': &mut {unknown}
+ 127..129 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_2() {
+ // caused "equating two type variables, ...", taken from std
+ check_infer(
+ r#"
+ fn test_line_buffer() {
+ &[0, b'\n', 1, b'\n'];
+ }
+ "#,
+ expect![[r#"
+ 22..52 '{ ...n']; }': ()
+ 28..49 '&[0, b...b'\n']': &[u8; 4]
+ 29..49 '[0, b'...b'\n']': [u8; 4]
+ 30..31 '0': u8
+ 33..38 'b'\n'': u8
+ 40..41 '1': u8
+ 43..48 'b'\n'': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_3() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn compute() {
+ match nope!() {
+ SizeSkeleton::Pointer { non_zero: true, tail } => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 17..107 '{ ... } }': ()
+ 23..105 'match ... }': ()
+ 29..36 'nope!()': {unknown}
+ 47..93 'SizeSk...tail }': {unknown}
+ 81..85 'true': bool
+ 81..85 'true': bool
+ 87..91 'tail': {unknown}
+ 97..99 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_4() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn primitive_type() {
+ match *self {
+ BorrowedRef { type_: Primitive(p), ..} => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 24..105 '{ ... } }': ()
+ 30..103 'match ... }': ()
+ 36..41 '*self': {unknown}
+ 37..41 'self': {unknown}
+ 52..90 'Borrow...), ..}': {unknown}
+ 73..85 'Primitive(p)': {unknown}
+ 83..84 'p': {unknown}
+ 94..96 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_5() {
+ // taken from rustc
+ check_infer(
+ r#"
+ fn extra_compiler_flags() {
+ for content in doesnt_matter {
+ let name = if doesnt_matter {
+ first
+ } else {
+ &content
+ };
+
+ let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
+ name
+ } else {
+ content
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 26..322 '{ ... } }': ()
+ 32..320 'for co... }': ()
+ 36..43 'content': {unknown}
+ 47..60 'doesnt_matter': {unknown}
+ 61..320 '{ ... }': ()
+ 75..79 'name': &{unknown}
+ 82..166 'if doe... }': &{unknown}
+ 85..98 'doesnt_matter': bool
+ 99..128 '{ ... }': &{unknown}
+ 113..118 'first': &{unknown}
+ 134..166 '{ ... }': &{unknown}
+ 148..156 '&content': &{unknown}
+ 149..156 'content': {unknown}
+ 181..188 'content': &{unknown}
+ 191..313 'if ICE... }': &{unknown}
+ 194..231 'ICE_RE..._VALUE': {unknown}
+ 194..247 'ICE_RE...&name)': bool
+ 241..246 '&name': &&{unknown}
+ 242..246 'name': &{unknown}
+ 248..276 '{ ... }': &{unknown}
+ 262..266 'name': &{unknown}
+ 282..313 '{ ... }': {unknown}
+ 296..303 'content': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_generics_crash() {
+ // another crash found typechecking rustc
+ check_infer(
+ r#"
+ struct Canonical<V> {
+ value: V,
+ }
+ struct QueryResponse<V> {
+ value: V,
+ }
+ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
+ &query_response.value;
+ }
+ "#,
+ expect![[r#"
+ 91..105 'query_response': Canonical<QueryResponse<R>>
+ 136..166 '{ ...lue; }': ()
+ 142..163 '&query....value': &QueryResponse<R>
+ 143..157 'query_response': Canonical<QueryResponse<R>>
+ 143..163 'query_....value': QueryResponse<R>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paren_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = (bar!());
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()); }': ()
+ 54..55 'a': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = [bar!()];
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()]; }': ()
+ 54..55 'a': [u32; 1]
+ 58..66 '[bar!()]': [u32; 1]
+ "#]],
+ );
+}
+
+#[test]
+fn bug_1030() {
+ check_infer(
+ r#"
+ struct HashSet<T, H>;
+ struct FxHasher;
+ type FxHashSet<T> = HashSet<T, FxHasher>;
+
+ impl<T, H> HashSet<T, H> {
+ fn default() -> HashSet<T, H> {}
+ }
+
+ pub fn main_loop() {
+ FxHashSet::default();
+ }
+ "#,
+ expect![[r#"
+ 143..145 '{}': HashSet<T, H>
+ 168..197 '{ ...t(); }': ()
+ 174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
+ 174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2669() {
+ check_infer(
+ r#"
+ trait A {}
+ trait Write {}
+ struct Response<T> {}
+
+ trait D {
+ fn foo();
+ }
+
+ impl<T:A> D for Response<T> {
+ fn foo() {
+ end();
+ fn end<W: Write>() {
+ let _x: T = loop {};
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 119..214 '{ ... }': ()
+ 129..132 'end': fn end<{unknown}>()
+ 129..134 'end()': ()
+ 163..208 '{ ... }': ()
+ 181..183 '_x': !
+ 190..197 'loop {}': !
+ 195..197 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn issue_2705() {
+ check_infer(
+ r#"
+ trait Trait {}
+ fn test() {
+ <Trait<u32>>::foo()
+ }
+ "#,
+ expect![[r#"
+ 25..52 '{ ...oo() }': ()
+ 31..48 '<Trait...>::foo': {unknown}
+ 31..50 '<Trait...:foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2683_chars_impl() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Chars<'a> {}
+impl<'a> Iterator for Chars<'a> {
+ type Item = char;
+ fn next(&mut self) -> Option<char> { loop {} }
+}
+
+fn test() {
+ let chars: Chars<'_>;
+ (chars.next(), chars.nth(1));
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (Option<char>, Option<char>)
+"#,
+ );
+}
+
+#[test]
+fn issue_3999_slice() {
+ check_infer(
+ r#"
+ fn foo(params: &[usize]) {
+ match params {
+ [ps @ .., _] => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[usize]
+ 25..80 '{ ... } }': ()
+ 31..78 'match ... }': ()
+ 37..43 'params': &[usize]
+ 54..66 '[ps @ .., _]': [usize]
+ 55..62 'ps @ ..': &[usize]
+ 60..62 '..': [usize]
+ 64..65 '_': usize
+ 70..72 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_3999_struct() {
+ // rust-analyzer should not panic on seeing this malformed
+ // record pattern.
+ check_infer(
+ r#"
+ struct Bar {
+ a: bool,
+ }
+ fn foo(b: Bar) {
+ match b {
+ Bar { a: .. } => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 35..36 'b': Bar
+ 43..95 '{ ... } }': ()
+ 49..93 'match ... }': ()
+ 55..56 'b': Bar
+ 67..80 'Bar { a: .. }': Bar
+ 76..78 '..': bool
+ 84..86 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4235_name_conflicts() {
+ check_infer(
+ r#"
+ struct FOO {}
+ static FOO:FOO = FOO {};
+
+ impl FOO {
+ fn foo(&self) {}
+ }
+
+ fn main() {
+ let a = &FOO;
+ a.foo();
+ }
+ "#,
+ expect![[r#"
+ 31..37 'FOO {}': FOO
+ 63..67 'self': &FOO
+ 69..71 '{}': ()
+ 85..119 '{ ...o(); }': ()
+ 95..96 'a': &FOO
+ 99..103 '&FOO': &FOO
+ 100..103 'FOO': FOO
+ 109..110 'a': &FOO
+ 109..116 'a.foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4465_dollar_crate_at_type() {
+ check_infer(
+ r#"
+ pub struct Foo {}
+ pub fn anything<T>() -> T {
+ loop {}
+ }
+ macro_rules! foo {
+ () => {{
+ let r: $crate::Foo = anything();
+ r
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ 44..59 '{ loop {} }': T
+ 50..57 'loop {}': !
+ 55..57 '{}': ()
+ !0..31 '{letr:...g();r}': Foo
+ !4..5 'r': Foo
+ !18..26 'anything': fn anything<Foo>() -> Foo
+ !18..28 'anything()': Foo
+ !29..30 'r': Foo
+ 163..187 '{ ...!(); }': ()
+ 173..175 '_a': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6811() {
+ check_infer(
+ r#"
+ macro_rules! profile_function {
+ () => {
+ let _a = 1;
+ let _b = 1;
+ };
+ }
+ fn main() {
+ profile_function!();
+ }
+ "#,
+ expect![[r#"
+ !0..16 'let_a=...t_b=1;': ()
+ !3..5 '_a': i32
+ !6..7 '1': i32
+ !11..13 '_b': i32
+ !14..15 '1': i32
+ 103..131 '{ ...!(); }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4053_diesel_where_clauses() {
+ check_infer(
+ r#"
+ trait BoxedDsl<DB> {
+ type Output;
+ fn internal_into_boxed(self) -> Self::Output;
+ }
+
+ struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
+ order: Order,
+ }
+
+ trait QueryFragment<DB: Backend> {}
+
+ trait Into<T> { fn into(self) -> T; }
+
+ impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
+ for SelectStatement<F, S, D, W, O, LOf, G>
+ where
+ O: Into<dyn QueryFragment<DB>>,
+ {
+ type Output = XXX;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ self.order.into();
+ }
+ }
+ "#,
+ expect![[r#"
+ 65..69 'self': Self
+ 267..271 'self': Self
+ 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 488..522 '{ ... }': ()
+ 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 498..508 'self.order': O
+ 498..515 'self.o...into()': dyn QueryFragment<DB>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4953() {
+ check_infer(
+ r#"
+ pub struct Foo(pub i64);
+ impl Foo {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 58..72 '{ Self(0i64) }': Foo
+ 60..64 'Self': Foo(i64) -> Foo
+ 60..70 'Self(0i64)': Foo
+ 65..69 '0i64': i64
+ "#]],
+ );
+ check_infer(
+ r#"
+ pub struct Foo<T>(pub T);
+ impl Foo<i64> {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 64..78 '{ Self(0i64) }': Foo<i64>
+ 66..70 'Self': Foo<i64>(i64) -> Foo<i64>
+ 66..76 'Self(0i64)': Foo<i64>
+ 71..75 '0i64': i64
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4931() {
+ check_infer(
+ r#"
+ trait Div<T> {
+ type Output;
+ }
+
+ trait CheckedDiv: Div<()> {}
+
+ trait PrimInt: CheckedDiv<Output = ()> {
+ fn pow(self);
+ }
+
+ fn check<T: PrimInt>(i: T) {
+ i.pow();
+ }
+ "#,
+ expect![[r#"
+ 117..121 'self': Self
+ 148..149 'i': T
+ 154..170 '{ ...w(); }': ()
+ 160..161 'i': T
+ 160..167 'i.pow()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4885() {
+ check_infer(
+ r#"
+ //- minicore: coerce_unsized, future
+ use core::future::Future;
+ trait Foo<R> {
+ type Bar;
+ }
+ fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ bar(key)
+ }
+ fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ }
+ "#,
+ expect![[r#"
+ 70..73 'key': &K
+ 132..148 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
+ 138..141 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
+ 138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
+ 142..145 'key': &K
+ 162..165 'key': &K
+ 224..227 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4800() {
+ check_infer(
+ r#"
+ trait Debug {}
+
+ struct Foo<T>;
+
+ type E1<T> = (T, T, T);
+ type E2<T> = E1<E1<E1<(T, T, T)>>>;
+
+ impl Debug for Foo<E2<()>> {}
+
+ struct Request;
+
+ pub trait Future {
+ type Output;
+ }
+
+ pub struct PeerSet<D>;
+
+ impl<D> Service<Request> for PeerSet<D>
+ where
+ D: Discover,
+ D::Key: Debug,
+ {
+ type Error = ();
+ type Future = dyn Future<Output = Self::Error>;
+
+ fn call(&mut self) -> Self::Future {
+ loop {}
+ }
+ }
+
+ pub trait Discover {
+ type Key;
+ }
+
+ pub trait Service<Request> {
+ type Error;
+ type Future: Future<Output = Self::Error>;
+ fn call(&mut self) -> Self::Future;
+ }
+ "#,
+ expect![[r#"
+ 379..383 'self': &mut PeerSet<D>
+ 401..424 '{ ... }': dyn Future<Output = ()>
+ 411..418 'loop {}': !
+ 416..418 '{}': ()
+ 575..579 'self': &mut Self
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4966() {
+ check_infer(
+ r#"
+ //- minicore: deref
+ pub trait IntoIterator {
+ type Item;
+ }
+
+ struct Repeat<A> { element: A }
+
+ struct Map<F> { f: F }
+
+ struct Vec<T> {}
+
+ impl<T> core::ops::Deref for Vec<T> {
+ type Target = [T];
+ }
+
+ fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
+
+ fn main() {
+ let inner = Map { f: |_: &f64| 0.0 };
+
+ let repeat = Repeat { element: inner };
+
+ let vec = from_iter(repeat);
+
+ vec.foo_bar();
+ }
+ "#,
+ expect![[r#"
+ 225..229 'iter': T
+ 244..246 '{}': Vec<A>
+ 258..402 '{ ...r(); }': ()
+ 268..273 'inner': Map<|&f64| -> f64>
+ 276..300 'Map { ... 0.0 }': Map<|&f64| -> f64>
+ 285..298 '|_: &f64| 0.0': |&f64| -> f64
+ 286..287 '_': &f64
+ 295..298 '0.0': f64
+ 311..317 'repeat': Repeat<Map<|&f64| -> f64>>
+ 320..345 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
+ 338..343 'inner': Map<|&f64| -> f64>
+ 356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 372..378 'repeat': Repeat<Map<|&f64| -> f64>>
+ 386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 386..399 'vec.foo_bar()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6628() {
+ check_infer(
+ r#"
+//- minicore: fn
+struct S<T>();
+impl<T> S<T> {
+ fn f(&self, _t: T) {}
+ fn g<F: FnOnce(&T)>(&self, _f: F) {}
+}
+fn main() {
+ let s = S();
+ s.g(|_x| {});
+ s.f(10);
+}
+"#,
+ expect![[r#"
+ 40..44 'self': &S<T>
+ 46..48 '_t': T
+ 53..55 '{}': ()
+ 81..85 'self': &S<T>
+ 87..89 '_f': F
+ 94..96 '{}': ()
+ 109..160 '{ ...10); }': ()
+ 119..120 's': S<i32>
+ 123..124 'S': S<i32>() -> S<i32>
+ 123..126 'S()': S<i32>
+ 132..133 's': S<i32>
+ 132..144 's.g(|_x| {})': ()
+ 136..143 '|_x| {}': |&i32| -> ()
+ 137..139 '_x': &i32
+ 141..143 '{}': ()
+ 150..151 's': S<i32>
+ 150..157 's.f(10)': ()
+ 154..156 '10': i32
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6852() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct BufWriter {}
+
+struct Mutex<T> {}
+struct MutexGuard<'a, T> {}
+impl<T> Mutex<T> {
+ fn lock(&self) -> MutexGuard<'_, T> {}
+}
+impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
+ type Target = T;
+}
+fn flush(&self) {
+ let w: &Mutex<BufWriter>;
+ *(w.lock());
+}
+"#,
+ expect![[r#"
+ 123..127 'self': &Mutex<T>
+ 150..152 '{}': MutexGuard<T>
+ 234..238 'self': &{unknown}
+ 240..290 '{ ...()); }': ()
+ 250..251 'w': &Mutex<BufWriter>
+ 276..287 '*(w.lock())': BufWriter
+ 278..279 'w': &Mutex<BufWriter>
+ 278..286 'w.lock()': MutexGuard<BufWriter>
+ "#]],
+ );
+}
+
+#[test]
+fn param_overrides_fn() {
+ check_types(
+ r#"
+ fn example(example: i32) {
+ fn f() {}
+ example;
+ //^^^^^^^ i32
+ }
+ "#,
+ )
+}
+
+#[test]
+fn lifetime_from_chalk_during_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ loop {}
+ }
+}
+
+trait Iterator {
+ type Item;
+}
+
+pub struct Iter<'a, T: 'a> {
+ inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
+}
+
+trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> {
+ fn clone_box(&self);
+}
+
+fn clone_iter<T>(s: Iter<T>) {
+ s.inner.clone_box();
+ //^^^^^^^^^^^^^^^^^^^ ()
+}
+"#,
+ )
+}
+
+#[test]
+fn issue_8686() {
+ check_infer(
+ r#"
+pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+}
+pub trait FromResidual<R = <Self as Try>::Residual> {
+ fn from_residual(residual: R) -> Self;
+}
+
+struct ControlFlow<B, C>;
+impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, !>;
+}
+impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
+}
+
+fn test() {
+ ControlFlow::from_residual(ControlFlow::<u32, !>);
+}
+ "#,
+ expect![[r#"
+ 144..152 'residual': R
+ 365..366 'r': ControlFlow<B, !>
+ 395..410 '{ ControlFlow }': ControlFlow<B, C>
+ 397..408 'ControlFlow': ControlFlow<B, C>
+ 424..482 '{ ...!>); }': ()
+ 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
+ 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
+ 457..478 'Contro...32, !>': ControlFlow<u32, !>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tail() {
+ // https://github.com/rust-lang/rust-analyzer/issues/8378
+ check_infer(
+ r#"
+ fn fake_tail(){
+ { "first" }
+ #[cfg(never)] 9
+ }
+ fn multiple_fake(){
+ { "fake" }
+ { "fake" }
+ { "second" }
+ #[cfg(never)] { 11 }
+ #[cfg(never)] 12;
+ #[cfg(never)] 13
+ }
+ fn no_normal_tail(){
+ { "third" }
+ #[cfg(never)] 14;
+ #[cfg(never)] 15;
+ }
+ fn no_actual_tail(){
+ { "fourth" };
+ #[cfg(never)] 14;
+ #[cfg(never)] 15
+ }
+ "#,
+ expect![[r#"
+ 14..53 '{ ...)] 9 }': ()
+ 20..31 '{ "first" }': ()
+ 22..29 '"first"': &str
+ 72..190 '{ ...] 13 }': ()
+ 78..88 '{ "fake" }': &str
+ 80..86 '"fake"': &str
+ 93..103 '{ "fake" }': &str
+ 95..101 '"fake"': &str
+ 108..120 '{ "second" }': ()
+ 110..118 '"second"': &str
+ 210..273 '{ ... 15; }': ()
+ 216..227 '{ "third" }': ()
+ 218..225 '"third"': &str
+ 293..357 '{ ...] 15 }': ()
+ 299..311 '{ "fourth" }': &str
+ 301..309 '"fourth"': &str
+ "#]],
+ )
+}
+
+#[test]
+fn impl_trait_in_option_9530() {
+ check_types(
+ r#"
+//- minicore: sized
+struct Option<T>;
+impl<T> Option<T> {
+ fn unwrap(self) -> T { loop {} }
+}
+fn make() -> Option<impl Copy> { Option }
+trait Copy {}
+fn test() {
+ let o = make();
+ o.unwrap();
+ //^^^^^^^^^^ impl Copy
+}
+ "#,
+ )
+}
+
+#[test]
+fn bare_dyn_trait_binders_9639() {
+ check_no_mismatches(
+ r#"
+//- minicore: fn, coerce_unsized
+fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
+ loop {}
+}
+
+fn parse_arule() {
+ infix_parse((), &(|_recurse| ()))
+}
+ "#,
+ )
+}
+
+#[test]
+fn call_expected_type_closure() {
+ check_types(
+ r#"
+//- minicore: fn, option
+
+fn map<T, U>(o: Option<T>, f: impl FnOnce(T) -> U) -> Option<U> { loop {} }
+struct S {
+ field: u32
+}
+
+fn test() {
+ let o = Some(S { field: 2 });
+ let _: Option<()> = map(o, |s| { s.field; });
+ // ^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_diesel_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option
+
+trait TypeMetadata {
+ type MetadataLookup;
+}
+
+pub struct Output<'a, T, DB>
+where
+ DB: TypeMetadata,
+ DB::MetadataLookup: 'a,
+{
+ out: T,
+ metadata_lookup: Option<&'a DB::MetadataLookup>,
+}
+
+impl<'a, T, DB: TypeMetadata> Output<'a, T, DB> {
+ pub fn new(out: T, metadata_lookup: &'a DB::MetadataLookup) -> Self {
+ Output {
+ out,
+ metadata_lookup: Some(metadata_lookup),
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn bitslice_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option, deref
+
+pub trait BitView {
+ type Store;
+}
+
+pub struct Lsb0;
+
+pub struct BitArray<V: BitView> { }
+
+pub struct BitSlice<T> { }
+
+impl<V: BitView> core::ops::Deref for BitArray<V> {
+ type Target = BitSlice<V::Store>;
+}
+
+impl<T> BitSlice<T> {
+ pub fn split_first(&self) -> Option<(T, &Self)> { loop {} }
+}
+
+fn multiexp_inner() {
+ let exp: &BitArray<Foo>;
+ exp.split_first();
+}
+ "#,
+ );
+}
+
+#[test]
+fn macro_expands_to_impl_trait() {
+ check_no_mismatches(
+ r#"
+trait Foo {}
+
+macro_rules! ty {
+ () => {
+ impl Foo
+ }
+}
+
+fn foo(_: ty!()) {}
+
+fn bar() {
+ foo(());
+}
+ "#,
+ )
+}
+
+#[test]
+fn nested_macro_in_fn_params() {
+ check_no_mismatches(
+ r#"
+macro_rules! U32Inner {
+ () => {
+ u32
+ };
+}
+
+macro_rules! U32 {
+ () => {
+ U32Inner!()
+ };
+}
+
+fn mamba(a: U32!(), p: u32) -> u32 {
+ a
+}
+ "#,
+ )
+}
+
+#[test]
+fn for_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ for _ in { let x = 0; } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..68 '{ ... } }': ()
+ 16..66 'for _ ... }': ()
+ 20..21 '_': {unknown}
+ 25..39 '{ let x = 0; }': ()
+ 31..32 'x': i32
+ 35..36 '0': i32
+ 40..66 '{ ... }': ()
+ 54..55 'y': i32
+ 58..59 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn while_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ while { true } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..59 '{ ... } }': ()
+ 16..57 'while ... }': ()
+ 22..30 '{ true }': bool
+ 24..28 'true': bool
+ 31..57 '{ ... }': ()
+ 45..46 'y': i32
+ 49..50 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn bug_11242() {
+ // FIXME: wrong, should be u32
+ check_types(
+ r#"
+fn foo<A, B>()
+where
+ A: IntoIterator<Item = u32>,
+ B: IntoIterator<Item = usize>,
+{
+ let _x: <A as IntoIterator>::Item;
+ // ^^ {unknown}
+}
+
+pub trait Iterator {
+ type Item;
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+}
+
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+}
+"#,
+ );
+}
+
+#[test]
+fn bug_11659() {
+ check_no_mismatches(
+ r#"
+struct LinkArray<const N: usize, LD>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<N, LD> {
+ let r = LinkArray::<N, LD>(x);
+ r
+}
+
+fn test() {
+ let x = f::<2, i32>(5);
+ let y = LinkArray::<52, LinkArray<2, i32>>(x);
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+struct LinkArray<LD, const N: usize>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<LD, N> {
+ let r = LinkArray::<LD, N>(x);
+ r
+}
+
+fn test() {
+ let x = f::<i32, 2>(5);
+ let y = LinkArray::<LinkArray<i32, 2>, 52>(x);
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_error_tolerance() {
+ check_no_mismatches(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+struct CT<const N: usize, T>(T);
+struct TC<T, const N: usize>(T);
+fn f<const N: usize, T>(x: T) -> (CT<N, T>, TC<T, N>) {
+ let l = CT::<N, T>(x);
+ let r = TC::<N, T>(x);
+ (l, r)
+}
+
+trait TR1<const N: usize>;
+trait TR2<const N: usize>;
+
+impl<const N: usize, T> TR1<N> for CT<N, T>;
+impl<const N: usize, T> TR1<5> for TC<T, N>;
+impl<const N: usize, T> TR2<N> for CT<T, N>;
+
+trait TR3<const N: usize> {
+ fn tr3(&self) -> &Self;
+}
+
+impl<const N: usize, T> TR3<5> for TC<T, N> {
+ fn tr3(&self) -> &Self {
+ self
+ }
+}
+
+impl<const N: usize, T> TR3<Item = 5> for TC<T, N> {}
+impl<const N: usize, T> TR3<T> for TC<T, N> {}
+
+fn impl_trait<const N: usize>(inp: impl TR1<N>) {}
+fn dyn_trait<const N: usize>(inp: &dyn TR2<N>) {}
+fn impl_trait_bad<'a, const N: usize>(inp: impl TR1<i32>) -> impl TR1<'a, i32> {}
+fn impl_trait_very_bad<const N: usize>(inp: impl TR1<Item = i32>) -> impl TR1<'a, Item = i32, 5, Foo = N> {}
+
+fn test() {
+ f::<2, i32>(5);
+ f::<2, 2>(5);
+ f(5);
+ f::<i32>(5);
+ CT::<52, CT<2, i32>>(x);
+ CT::<CT<2, i32>>(x);
+ impl_trait_bad(5);
+ impl_trait_bad(12);
+ TR3<5>::tr3();
+ TR3<{ 2+3 }>::tr3();
+ TC::<i32, 10>(5).tr3();
+ TC::<i32, 20>(5).tr3();
+ TC::<i32, i32>(5).tr3();
+ TC::<i32, { 7 + 3 }>(5).tr3();
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_impl_trait() {
+ check_no_mismatches(
+ r#"
+ //- minicore: from
+
+ struct Foo<T, const M: usize>;
+
+ trait Tr<T> {
+ fn f(T) -> Self;
+ }
+
+ impl<T, const M: usize> Tr<[T; M]> for Foo<T, M> {
+ fn f(_: [T; M]) -> Self {
+ Self
+ }
+ }
+
+ fn test() {
+ Foo::f([1, 2, 7, 10]);
+ }
+ "#,
+ );
+}
+
+#[test]
+fn nalgebra_factorial() {
+ check_no_mismatches(
+ r#"
+ const FACTORIAL: [u128; 4] = [1, 1, 2, 6];
+
+ fn factorial(n: usize) -> u128 {
+ match FACTORIAL.get(n) {
+ Some(f) => *f,
+ None => panic!("{}! is greater than u128::MAX", n),
+ }
+ }
+ "#,
+ )
+}
+
+#[test]
+fn regression_11688_1() {
+ check_no_mismatches(
+ r#"
+ pub struct Buffer<T>(T);
+ type Writer = Buffer<u8>;
+ impl<T> Buffer<T> {
+ fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+ loop {}
+ }
+ }
+ trait Encode<S> {
+ fn encode(self, w: &mut Writer, s: &mut S);
+ }
+ impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_2() {
+ check_types(
+ r#"
+ union MaybeUninit<T> {
+ uninit: (),
+ value: T,
+ }
+
+ impl<T> MaybeUninit<T> {
+ fn uninit_array<const LEN: usize>() -> [Self; LEN] {
+ loop {}
+ }
+ }
+
+ fn main() {
+ let x = MaybeUninit::<i32>::uninit_array::<1>();
+ //^ [MaybeUninit<i32>; 1]
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_3() {
+ check_types(
+ r#"
+ //- minicore: iterator
+ struct Ar<T, const N: u8>(T);
+ fn f<const LEN: usize, T, const BASE: u8>(
+ num_zeros: usize,
+ ) -> dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
+ loop {}
+ }
+ fn dynamic_programming() {
+ for board in f::<9, u8, 7>(1) {
+ //^^^^^ [Ar<u8, 7>; 9]
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_4() {
+ check_types(
+ r#"
+ trait Bar<const C: usize> {
+ fn baz(&self) -> [i32; C];
+ }
+
+ fn foo(x: &dyn Bar<2>) {
+ x.baz();
+ //^^^^^^^ [i32; 2]
+ }
+ "#,
+ )
+}
+
+#[test]
+fn gat_crash_1() {
+ cov_mark::check!(ignore_gats);
+ check_no_mismatches(
+ r#"
+trait ATrait {}
+
+trait Crash {
+ type Member<const N: usize>: ATrait;
+ fn new<const N: usize>() -> Self::Member<N>;
+}
+
+fn test<T: Crash>() {
+ T::new();
+}
+"#,
+ );
+}
+
+#[test]
+fn gat_crash_2() {
+ check_no_mismatches(
+ r#"
+pub struct InlineStorage {}
+
+pub struct InlineStorageHandle<T: ?Sized> {}
+
+pub unsafe trait Storage {
+ type Handle<T: ?Sized>;
+ fn create<T: ?Sized>() -> Self::Handle<T>;
+}
+
+unsafe impl Storage for InlineStorage {
+ type Handle<T: ?Sized> = InlineStorageHandle<T>;
+}
+"#,
+ );
+}
+
+#[test]
+fn cfgd_out_self_param() {
+ cov_mark::check!(cfgd_out_self_param);
+ check_no_mismatches(
+ r#"
+struct S;
+impl S {
+ fn f(#[cfg(never)] &self) {}
+}
+
+fn f(s: S) {
+ s.f();
+}
+"#,
+ );
+}
+
+#[test]
+fn rust_161_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Drop + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+pub trait Destruct {}
+
+impl<T: ?Sized> const Destruct for T {}
+"#,
+ );
+}
+
+#[test]
+fn rust_162_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+#[lang = "destruct"]
+pub trait Destruct {}
+"#,
+ );
+}
+
+#[test]
+fn tuple_struct_pattern_with_unmatched_args_crash() {
+ check_infer(
+ r#"
+struct S(usize);
+fn main() {
+ let S(.., a, b) = S(1);
+ let (.., a, b) = (1,);
+}
+ "#,
+ expect![[r#"
+ 27..85 '{ ...1,); }': ()
+ 37..48 'S(.., a, b)': S
+ 43..44 'a': usize
+ 46..47 'b': {unknown}
+ 51..52 'S': S(usize) -> S
+ 51..55 'S(1)': S
+ 53..54 '1': usize
+ 65..75 '(.., a, b)': (i32, {unknown})
+ 70..71 'a': i32
+ 73..74 'b': {unknown}
+ 78..82 '(1,)': (i32,)
+ 79..80 '1': i32
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
new file mode 100644
index 000000000..5b08f5521
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -0,0 +1,3072 @@
+use expect_test::expect;
+
+use super::{check, check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_box() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; 1]>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod prelude {}
+
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized> {
+ inner: *mut T,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_box_with_allocator() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; 1], {unknown}>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized, A: Allocator> {
+ inner: *mut T,
+ allocator: A,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_adt_self() {
+ check_types(
+ r#"
+enum Nat { Succ(Self), Demo(Nat), Zero }
+
+fn test() {
+ let foo: Nat = Nat::Zero;
+ if let Nat::Succ(x) = foo {
+ x;
+ } //^ Nat
+}
+"#,
+ );
+}
+
+#[test]
+fn self_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ impl S<u32> {
+ fn foo() {
+ Self { x: 1 };
+ }
+ }
+ "#,
+ expect![[r#"
+ 49..79 '{ ... }': ()
+ 59..72 'Self { x: 1 }': S<u32>
+ 69..70 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn type_alias_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ type SS = S<u32>;
+
+ fn foo() {
+ SS { x: 1 };
+ }
+ "#,
+ expect![[r#"
+ 50..70 '{ ...1 }; }': ()
+ 56..67 'SS { x: 1 }': S<u32>
+ 64..65 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_ranges() {
+ check_types(
+ r#"
+//- minicore: range
+fn test() {
+ let a = ..;
+ let b = 1..;
+ let c = ..2u32;
+ let d = 1..2usize;
+ let e = ..=10;
+ let f = 'a'..='z';
+
+ let t = (a, b, c, d, e, f);
+ t;
+} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
+"#,
+ );
+}
+
+#[test]
+fn infer_while_let() {
+ check_types(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(x) = foo {
+ x;
+ } //^ f32
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_basics() {
+ check_infer(
+ r#"
+fn test(a: u32, b: isize, c: !, d: &str) {
+ a;
+ b;
+ c;
+ d;
+ 1usize;
+ 1isize;
+ "test";
+ 1.0f32;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': u32
+ 16..17 'b': isize
+ 26..27 'c': !
+ 32..33 'd': &str
+ 41..120 '{ ...f32; }': ()
+ 47..48 'a': u32
+ 54..55 'b': isize
+ 61..62 'c': !
+ 68..69 'd': &str
+ 75..81 '1usize': usize
+ 87..93 '1isize': isize
+ 99..105 '"test"': &str
+ 111..117 '1.0f32': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_let() {
+ check_infer(
+ r#"
+fn test() {
+ let a = 1isize;
+ let b: usize = 1;
+ let c = b;
+ let d: u32;
+ let e;
+ let f: i32 = e;
+}
+"#,
+ expect![[r#"
+ 10..117 '{ ...= e; }': ()
+ 20..21 'a': isize
+ 24..30 '1isize': isize
+ 40..41 'b': usize
+ 51..52 '1': usize
+ 62..63 'c': usize
+ 66..67 'b': usize
+ 77..78 'd': u32
+ 93..94 'e': i32
+ 104..105 'f': i32
+ 113..114 'e': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paths() {
+ check_infer(
+ r#"
+fn a() -> u32 { 1 }
+
+mod b {
+ fn c() -> u32 { 1 }
+}
+
+fn test() {
+ a();
+ b::c();
+}
+"#,
+ expect![[r#"
+ 14..19 '{ 1 }': u32
+ 16..17 '1': u32
+ 47..52 '{ 1 }': u32
+ 49..50 '1': u32
+ 66..90 '{ ...c(); }': ()
+ 72..73 'a': fn a() -> u32
+ 72..75 'a()': u32
+ 81..85 'b::c': fn c() -> u32
+ 81..87 'b::c()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_type() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn foo() -> i32 { 1 }
+}
+
+fn test() {
+ S::foo();
+ <S>::foo();
+}
+"#,
+ expect![[r#"
+ 40..45 '{ 1 }': i32
+ 42..43 '1': i32
+ 59..92 '{ ...o(); }': ()
+ 65..71 'S::foo': fn foo() -> i32
+ 65..73 'S::foo()': i32
+ 79..87 '<S>::foo': fn foo() -> i32
+ 79..89 '<S>::foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+ c: C,
+}
+struct B;
+struct C(usize);
+
+fn test() {
+ let c = C(1);
+ B;
+ let a: A = A { b: B, c: C(1) };
+ a.b;
+ a.c;
+}
+"#,
+ expect![[r#"
+ 71..153 '{ ...a.c; }': ()
+ 81..82 'c': C
+ 85..86 'C': C(usize) -> C
+ 85..89 'C(1)': C
+ 87..88 '1': usize
+ 95..96 'B': B
+ 106..107 'a': A
+ 113..132 'A { b:...C(1) }': A
+ 120..121 'B': B
+ 126..127 'C': C(usize) -> C
+ 126..130 'C(1)': C
+ 128..129 '1': usize
+ 138..139 'a': A
+ 138..141 'a.b': B
+ 147..148 'a': A
+ 147..150 'a.c': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_enum() {
+ check_infer(
+ r#"
+enum E {
+ V1 { field: u32 },
+ V2
+}
+fn test() {
+ E::V1 { field: 1 };
+ E::V2;
+}
+"#,
+ expect![[r#"
+ 51..89 '{ ...:V2; }': ()
+ 57..75 'E::V1 ...d: 1 }': E
+ 72..73 '1': u32
+ 81..86 'E::V2': E
+ "#]],
+ );
+}
+
+#[test]
+fn infer_union() {
+ check_infer(
+ r#"
+union MyUnion {
+ foo: u32,
+ bar: f32,
+}
+
+fn test() {
+ let u = MyUnion { foo: 0 };
+ unsafe { baz(u); }
+ let u = MyUnion { bar: 0.0 };
+ unsafe { baz(u); }
+}
+
+unsafe fn baz(u: MyUnion) {
+ let inner = u.foo;
+ let inner = u.bar;
+}
+"#,
+ expect![[r#"
+ 57..172 '{ ...); } }': ()
+ 67..68 'u': MyUnion
+ 71..89 'MyUnio...o: 0 }': MyUnion
+ 86..87 '0': u32
+ 95..113 'unsafe...(u); }': ()
+ 95..113 'unsafe...(u); }': ()
+ 104..107 'baz': fn baz(MyUnion)
+ 104..110 'baz(u)': ()
+ 108..109 'u': MyUnion
+ 122..123 'u': MyUnion
+ 126..146 'MyUnio... 0.0 }': MyUnion
+ 141..144 '0.0': f32
+ 152..170 'unsafe...(u); }': ()
+ 152..170 'unsafe...(u); }': ()
+ 161..164 'baz': fn baz(MyUnion)
+ 161..167 'baz(u)': ()
+ 165..166 'u': MyUnion
+ 188..189 'u': MyUnion
+ 200..249 '{ ...bar; }': ()
+ 210..215 'inner': u32
+ 218..219 'u': MyUnion
+ 218..223 'u.foo': u32
+ 233..238 'inner': f32
+ 241..242 'u': MyUnion
+ 241..246 'u.bar': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_refs() {
+ check_infer(
+ r#"
+fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
+ a;
+ *a;
+ &a;
+ &mut a;
+ b;
+ *b;
+ &b;
+ c;
+ *c;
+ d;
+ *d;
+}
+ "#,
+ expect![[r#"
+ 8..9 'a': &u32
+ 17..18 'b': &mut u32
+ 30..31 'c': *const u32
+ 45..46 'd': *mut u32
+ 58..149 '{ ... *d; }': ()
+ 64..65 'a': &u32
+ 71..73 '*a': u32
+ 72..73 'a': &u32
+ 79..81 '&a': &&u32
+ 80..81 'a': &u32
+ 87..93 '&mut a': &mut &u32
+ 92..93 'a': &u32
+ 99..100 'b': &mut u32
+ 106..108 '*b': u32
+ 107..108 'b': &mut u32
+ 114..116 '&b': &&mut u32
+ 115..116 'b': &mut u32
+ 122..123 'c': *const u32
+ 129..131 '*c': u32
+ 130..131 'c': *const u32
+ 137..138 'd': *mut u32
+ 144..146 '*d': u32
+ 145..146 'd': *mut u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_raw_ref() {
+ check_infer(
+ r#"
+fn test(a: i32) {
+ &raw mut a;
+ &raw const a;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': i32
+ 16..53 '{ ...t a; }': ()
+ 22..32 '&raw mut a': *mut i32
+ 31..32 'a': i32
+ 38..50 '&raw const a': *const i32
+ 49..50 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literals() {
+ check_infer(
+ r##"
+ fn test() {
+ 5i32;
+ 5f32;
+ 5f64;
+ "hello";
+ b"bytes";
+ 'c';
+ b'b';
+ 3.14;
+ 5000;
+ false;
+ true;
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#;
+ br#"yolo"#;
+ let a = b"a\x20b\
+ c";
+ let b = br"g\
+h";
+ let c = br#"x"\"yb"#;
+ }
+ "##,
+ expect![[r##"
+ 18..478 '{ ... }': ()
+ 32..36 '5i32': i32
+ 50..54 '5f32': f32
+ 68..72 '5f64': f64
+ 86..93 '"hello"': &str
+ 107..115 'b"bytes"': &[u8; 5]
+ 129..132 ''c'': char
+ 146..150 'b'b'': u8
+ 164..168 '3.14': f64
+ 182..186 '5000': i32
+ 200..205 'false': bool
+ 219..223 'true': bool
+ 237..333 'r#" ... "#': &str
+ 347..357 'br#"yolo"#': &[u8; 4]
+ 375..376 'a': &[u8; 4]
+ 379..403 'b"a\x2... c"': &[u8; 4]
+ 421..422 'b': &[u8; 4]
+ 425..433 'br"g\ h"': &[u8; 4]
+ 451..452 'c': &[u8; 6]
+ 455..467 'br#"x"\"yb"#': &[u8; 6]
+ "##]],
+ );
+}
+
+#[test]
+fn infer_unary_op() {
+ check_infer(
+ r#"
+enum SomeType {}
+
+fn test(x: SomeType) {
+ let b = false;
+ let c = !b;
+ let a = 100;
+ let d: i128 = -a;
+ let e = -100;
+ let f = !!!true;
+ let g = !42;
+ let h = !10u32;
+ let j = !a;
+ -3.14;
+ !3;
+ -x;
+ !x;
+ -"hello";
+ !"hello";
+}
+"#,
+ expect![[r#"
+ 26..27 'x': SomeType
+ 39..271 '{ ...lo"; }': ()
+ 49..50 'b': bool
+ 53..58 'false': bool
+ 68..69 'c': bool
+ 72..74 '!b': bool
+ 73..74 'b': bool
+ 84..85 'a': i128
+ 88..91 '100': i128
+ 101..102 'd': i128
+ 111..113 '-a': i128
+ 112..113 'a': i128
+ 123..124 'e': i32
+ 127..131 '-100': i32
+ 128..131 '100': i32
+ 141..142 'f': bool
+ 145..152 '!!!true': bool
+ 146..152 '!!true': bool
+ 147..152 '!true': bool
+ 148..152 'true': bool
+ 162..163 'g': i32
+ 166..169 '!42': i32
+ 167..169 '42': i32
+ 179..180 'h': u32
+ 183..189 '!10u32': u32
+ 184..189 '10u32': u32
+ 199..200 'j': i128
+ 203..205 '!a': i128
+ 204..205 'a': i128
+ 211..216 '-3.14': f64
+ 212..216 '3.14': f64
+ 222..224 '!3': i32
+ 223..224 '3': i32
+ 230..232 '-x': {unknown}
+ 231..232 'x': SomeType
+ 238..240 '!x': {unknown}
+ 239..240 'x': SomeType
+ 246..254 '-"hello"': {unknown}
+ 247..254 '"hello"': &str
+ 260..268 '!"hello"': {unknown}
+ 261..268 '"hello"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_backwards() {
+ check_infer(
+ r#"
+fn takes_u32(x: u32) {}
+
+struct S { i32_field: i32 }
+
+fn test() -> &mut &f64 {
+ let a = unknown_function();
+ takes_u32(a);
+ let b = unknown_function();
+ S { i32_field: b };
+ let c = unknown_function();
+ &mut &c
+}
+"#,
+ expect![[r#"
+ 13..14 'x': u32
+ 21..23 '{}': ()
+ 77..230 '{ ...t &c }': &mut &f64
+ 87..88 'a': u32
+ 91..107 'unknow...nction': {unknown}
+ 91..109 'unknow...tion()': u32
+ 115..124 'takes_u32': fn takes_u32(u32)
+ 115..127 'takes_u32(a)': ()
+ 125..126 'a': u32
+ 137..138 'b': i32
+ 141..157 'unknow...nction': {unknown}
+ 141..159 'unknow...tion()': i32
+ 165..183 'S { i3...d: b }': S
+ 180..181 'b': i32
+ 193..194 'c': f64
+ 197..213 'unknow...nction': {unknown}
+ 197..215 'unknow...tion()': f64
+ 221..228 '&mut &c': &mut &f64
+ 226..228 '&c': &f64
+ 227..228 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn test(&self) {
+ self;
+ }
+ fn test2(self: &Self) {
+ self;
+ }
+ fn test3() -> Self {
+ S {}
+ }
+ fn test4() -> Self {
+ Self {}
+ }
+}
+"#,
+ expect![[r#"
+ 33..37 'self': &S
+ 39..60 '{ ... }': ()
+ 49..53 'self': &S
+ 74..78 'self': &S
+ 87..108 '{ ... }': ()
+ 97..101 'self': &S
+ 132..152 '{ ... }': S
+ 142..146 'S {}': S
+ 176..199 '{ ... }': S
+ 186..193 'Self {}': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self_as_path() {
+ check_infer(
+ r#"
+struct S1;
+struct S2(isize);
+enum E {
+ V1,
+ V2(u32),
+}
+
+impl S1 {
+ fn test() {
+ Self;
+ }
+}
+impl S2 {
+ fn test() {
+ Self(1);
+ }
+}
+impl E {
+ fn test() {
+ Self::V1;
+ Self::V2(1);
+ }
+}
+"#,
+ expect![[r#"
+ 86..107 '{ ... }': ()
+ 96..100 'Self': S1
+ 134..158 '{ ... }': ()
+ 144..148 'Self': S2(isize) -> S2
+ 144..151 'Self(1)': S2
+ 149..150 '1': isize
+ 184..230 '{ ... }': ()
+ 194..202 'Self::V1': E
+ 212..220 'Self::V2': V2(u32) -> E
+ 212..223 'Self::V2(1)': E
+ 221..222 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_binary_op() {
+ check_infer(
+ r#"
+fn f(x: bool) -> i32 {
+ 0i32
+}
+
+fn test() -> bool {
+ let x = a && b;
+ let y = true || false;
+ let z = x == y;
+ let t = x != y;
+ let minus_forty: isize = -40isize;
+ let h = minus_forty <= CONST_2;
+ let c = f(z || y) + 5;
+ let d = b;
+ let g = minus_forty ^= i;
+ let ten: usize = 10;
+ let ten_is_eleven = ten == some_num;
+
+ ten < 3
+}
+"#,
+ expect![[r#"
+ 5..6 'x': bool
+ 21..33 '{ 0i32 }': i32
+ 27..31 '0i32': i32
+ 53..369 '{ ... < 3 }': bool
+ 63..64 'x': bool
+ 67..68 'a': bool
+ 67..73 'a && b': bool
+ 72..73 'b': bool
+ 83..84 'y': bool
+ 87..91 'true': bool
+ 87..100 'true || false': bool
+ 95..100 'false': bool
+ 110..111 'z': bool
+ 114..115 'x': bool
+ 114..120 'x == y': bool
+ 119..120 'y': bool
+ 130..131 't': bool
+ 134..135 'x': bool
+ 134..140 'x != y': bool
+ 139..140 'y': bool
+ 150..161 'minus_forty': isize
+ 171..179 '-40isize': isize
+ 172..179 '40isize': isize
+ 189..190 'h': bool
+ 193..204 'minus_forty': isize
+ 193..215 'minus_...ONST_2': bool
+ 208..215 'CONST_2': isize
+ 225..226 'c': i32
+ 229..230 'f': fn f(bool) -> i32
+ 229..238 'f(z || y)': i32
+ 229..242 'f(z || y) + 5': i32
+ 231..232 'z': bool
+ 231..237 'z || y': bool
+ 236..237 'y': bool
+ 241..242 '5': i32
+ 252..253 'd': {unknown}
+ 256..257 'b': {unknown}
+ 267..268 'g': ()
+ 271..282 'minus_forty': isize
+ 271..287 'minus_...y ^= i': ()
+ 286..287 'i': isize
+ 297..300 'ten': usize
+ 310..312 '10': usize
+ 322..335 'ten_is_eleven': bool
+ 338..341 'ten': usize
+ 338..353 'ten == some_num': bool
+ 345..353 'some_num': usize
+ 360..363 'ten': usize
+ 360..367 'ten < 3': bool
+ 366..367 '3': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_shift_op() {
+ check_infer(
+ r#"
+fn test() {
+ 1u32 << 5u8;
+ 1u32 >> 5u8;
+}
+"#,
+ expect![[r#"
+ 10..47 '{ ...5u8; }': ()
+ 16..20 '1u32': u32
+ 16..27 '1u32 << 5u8': u32
+ 24..27 '5u8': u8
+ 33..37 '1u32': u32
+ 33..44 '1u32 >> 5u8': u32
+ 41..44 '5u8': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_field_autoderef() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+}
+struct B;
+
+fn test1(a: A) {
+ let a1 = a;
+ a1.b;
+ let a2 = &a;
+ a2.b;
+ let a3 = &mut a;
+ a3.b;
+ let a4 = &&&&&&&a;
+ a4.b;
+ let a5 = &mut &&mut &&mut a;
+ a5.b;
+}
+
+fn test2(a1: *const A, a2: *mut A) {
+ a1.b;
+ a2.b;
+}
+"#,
+ expect![[r#"
+ 43..44 'a': A
+ 49..212 '{ ...5.b; }': ()
+ 59..61 'a1': A
+ 64..65 'a': A
+ 71..73 'a1': A
+ 71..75 'a1.b': B
+ 85..87 'a2': &A
+ 90..92 '&a': &A
+ 91..92 'a': A
+ 98..100 'a2': &A
+ 98..102 'a2.b': B
+ 112..114 'a3': &mut A
+ 117..123 '&mut a': &mut A
+ 122..123 'a': A
+ 129..131 'a3': &mut A
+ 129..133 'a3.b': B
+ 143..145 'a4': &&&&&&&A
+ 148..156 '&&&&&&&a': &&&&&&&A
+ 149..156 '&&&&&&a': &&&&&&A
+ 150..156 '&&&&&a': &&&&&A
+ 151..156 '&&&&a': &&&&A
+ 152..156 '&&&a': &&&A
+ 153..156 '&&a': &&A
+ 154..156 '&a': &A
+ 155..156 'a': A
+ 162..164 'a4': &&&&&&&A
+ 162..166 'a4.b': B
+ 176..178 'a5': &mut &&mut &&mut A
+ 181..199 '&mut &...&mut a': &mut &&mut &&mut A
+ 186..199 '&&mut &&mut a': &&mut &&mut A
+ 187..199 '&mut &&mut a': &mut &&mut A
+ 192..199 '&&mut a': &&mut A
+ 193..199 '&mut a': &mut A
+ 198..199 'a': A
+ 205..207 'a5': &mut &&mut &&mut A
+ 205..209 'a5.b': B
+ 223..225 'a1': *const A
+ 237..239 'a2': *mut A
+ 249..272 '{ ...2.b; }': ()
+ 255..257 'a1': *const A
+ 255..259 'a1.b': B
+ 265..267 'a2': *mut A
+ 265..269 'a2.b': B
+ "#]],
+ );
+}
+
+#[test]
+fn infer_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(T);
+
+impl<T> A<T> {
+ fn foo(&self) -> &T {
+ &self.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test() {
+ let t = A::foo(&&B(B(A(42))));
+}
+"#,
+ expect![[r#"
+ 66..70 'self': &A<T>
+ 78..101 '{ ... }': &T
+ 88..95 '&self.0': &T
+ 89..93 'self': &A<T>
+ 89..95 'self.0': T
+ 182..186 'self': &B<T>
+ 205..228 '{ ... }': &T
+ 215..222 '&self.0': &T
+ 216..220 'self': &B<T>
+ 216..222 'self.0': T
+ 242..280 '{ ...))); }': ()
+ 252..253 't': &i32
+ 256..262 'A::foo': fn foo<i32>(&A<i32>) -> &i32
+ 256..277 'A::foo...42))))': &i32
+ 263..276 '&&B(B(A(42)))': &&B<B<A<i32>>>
+ 264..276 '&B(B(A(42)))': &B<B<A<i32>>>
+ 265..266 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 265..276 'B(B(A(42)))': B<B<A<i32>>>
+ 267..268 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 267..275 'B(A(42))': B<A<i32>>
+ 269..270 'A': A<i32>(i32) -> A<i32>
+ 269..274 'A(42)': A<i32>
+ 271..273 '42': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_method_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(*mut T);
+
+impl<T> A<T> {
+ fn foo(&self, x: &A<T>) -> &T {
+ &*x.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test(a: A<i32>) {
+ let t = A(0 as *mut _).foo(&&B(B(a)));
+}
+"#,
+ expect![[r#"
+ 71..75 'self': &A<T>
+ 77..78 'x': &A<T>
+ 93..114 '{ ... }': &T
+ 103..108 '&*x.0': &T
+ 104..108 '*x.0': T
+ 105..106 'x': &A<T>
+ 105..108 'x.0': *mut T
+ 195..199 'self': &B<T>
+ 218..241 '{ ... }': &T
+ 228..235 '&self.0': &T
+ 229..233 'self': &B<T>
+ 229..235 'self.0': T
+ 253..254 'a': A<i32>
+ 264..310 '{ ...))); }': ()
+ 274..275 't': &i32
+ 278..279 'A': A<i32>(*mut i32) -> A<i32>
+ 278..292 'A(0 as *mut _)': A<i32>
+ 278..307 'A(0 as...B(a)))': &i32
+ 280..281 '0': i32
+ 280..291 '0 as *mut _': *mut i32
+ 297..306 '&&B(B(a))': &&B<B<A<i32>>>
+ 298..306 '&B(B(a))': &B<B<A<i32>>>
+ 299..300 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 299..306 'B(B(a))': B<B<A<i32>>>
+ 301..302 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 301..305 'B(a)': B<A<i32>>
+ 303..304 'a': A<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_elseif() {
+ check_infer(
+ r#"
+struct Foo { field: i32 }
+fn main(foo: Foo) {
+ if true {
+
+ } else if false {
+ foo.field
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'foo': Foo
+ 44..108 '{ ... } }': ()
+ 50..106 'if tru... }': ()
+ 53..57 'true': bool
+ 58..66 '{ }': ()
+ 72..106 'if fal... }': ()
+ 75..80 'false': bool
+ 81..106 '{ ... }': ()
+ 91..94 'foo': Foo
+ 91..100 'foo.field': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_if_match_with_return() {
+ check_infer(
+ r#"
+fn foo() {
+ let _x1 = if true {
+ 1
+ } else {
+ return;
+ };
+ let _x2 = if true {
+ 2
+ } else {
+ return
+ };
+ let _x3 = match true {
+ true => 3,
+ _ => {
+ return;
+ }
+ };
+ let _x4 = match true {
+ true => 4,
+ _ => return
+ };
+}
+"#,
+ expect![[r#"
+ 9..322 '{ ... }; }': ()
+ 19..22 '_x1': i32
+ 25..79 'if tru... }': i32
+ 28..32 'true': bool
+ 33..50 '{ ... }': i32
+ 43..44 '1': i32
+ 56..79 '{ ... }': i32
+ 66..72 'return': !
+ 89..92 '_x2': i32
+ 95..148 'if tru... }': i32
+ 98..102 'true': bool
+ 103..120 '{ ... }': i32
+ 113..114 '2': i32
+ 126..148 '{ ... }': !
+ 136..142 'return': !
+ 158..161 '_x3': i32
+ 164..246 'match ... }': i32
+ 170..174 'true': bool
+ 185..189 'true': bool
+ 185..189 'true': bool
+ 193..194 '3': i32
+ 204..205 '_': bool
+ 209..240 '{ ... }': i32
+ 223..229 'return': !
+ 256..259 '_x4': i32
+ 262..319 'match ... }': i32
+ 268..272 'true': bool
+ 283..287 'true': bool
+ 283..287 'true': bool
+ 291..292 '4': i32
+ 302..303 '_': bool
+ 307..313 'return': !
+ "#]],
+ )
+}
+
+#[test]
+fn infer_inherent_method() {
+ check_infer(
+ r#"
+ struct A;
+
+ impl A {
+ fn foo(self, x: u32) -> i32 {}
+ }
+
+ mod b {
+ impl super::A {
+ pub fn bar(&self, x: u64) -> i64 {}
+ }
+ }
+
+ fn test(a: A) {
+ a.foo(1);
+ (&a).bar(1);
+ a.bar(1);
+ }
+ "#,
+ expect![[r#"
+ 31..35 'self': A
+ 37..38 'x': u32
+ 52..54 '{}': i32
+ 106..110 'self': &A
+ 112..113 'x': u64
+ 127..129 '{}': i64
+ 147..148 'a': A
+ 153..201 '{ ...(1); }': ()
+ 159..160 'a': A
+ 159..167 'a.foo(1)': i32
+ 165..166 '1': u32
+ 173..184 '(&a).bar(1)': i64
+ 174..176 '&a': &A
+ 175..176 'a': A
+ 182..183 '1': u64
+ 190..191 'a': A
+ 190..198 'a.bar(1)': i64
+ 196..197 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inherent_method_str() {
+ check_infer(
+ r#"
+ #[lang = "str"]
+ impl str {
+ fn foo(&self) -> i32 {}
+ }
+
+ fn test() {
+ "foo".foo();
+ }
+ "#,
+ expect![[r#"
+ 39..43 'self': &str
+ 52..54 '{}': i32
+ 68..88 '{ ...o(); }': ()
+ 74..79 '"foo"': &str
+ 74..85 '"foo".foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a: (u32, &str) = (1, "a");
+ let b = (a, x);
+ let c = (y, x);
+ let d = (c, x);
+ let e = (1, "e");
+ let f = (e, "d");
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..169 '{ ...d"); }': ()
+ 37..38 'a': (u32, &str)
+ 54..62 '(1, "a")': (u32, &str)
+ 55..56 '1': u32
+ 58..61 '"a"': &str
+ 72..73 'b': ((u32, &str), &str)
+ 76..82 '(a, x)': ((u32, &str), &str)
+ 77..78 'a': (u32, &str)
+ 80..81 'x': &str
+ 92..93 'c': (isize, &str)
+ 96..102 '(y, x)': (isize, &str)
+ 97..98 'y': isize
+ 100..101 'x': &str
+ 112..113 'd': ((isize, &str), &str)
+ 116..122 '(c, x)': ((isize, &str), &str)
+ 117..118 'c': (isize, &str)
+ 120..121 'x': &str
+ 132..133 'e': (i32, &str)
+ 136..144 '(1, "e")': (i32, &str)
+ 137..138 '1': i32
+ 140..143 '"e"': &str
+ 154..155 'f': ((i32, &str), &str)
+ 158..166 '(e, "d")': ((i32, &str), &str)
+ 159..160 'e': (i32, &str)
+ 162..165 '"d"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a = [x];
+ let b = [a, a];
+ let c = [b, b];
+
+ let d = [y, 1, 2, 3];
+ let d = [1, y, 2, 3];
+ let e = [y];
+ let f = [d, d];
+ let g = [e, e];
+
+ let h = [1, 2];
+ let i = ["a", "b"];
+
+ let b = [a, ["b"]];
+ let x: [u8; 0] = [];
+ let y: [u8; 2+2] = [1,2,3,4];
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..326 '{ ...,4]; }': ()
+ 37..38 'a': [&str; 1]
+ 41..44 '[x]': [&str; 1]
+ 42..43 'x': &str
+ 54..55 'b': [[&str; 1]; 2]
+ 58..64 '[a, a]': [[&str; 1]; 2]
+ 59..60 'a': [&str; 1]
+ 62..63 'a': [&str; 1]
+ 74..75 'c': [[[&str; 1]; 2]; 2]
+ 78..84 '[b, b]': [[[&str; 1]; 2]; 2]
+ 79..80 'b': [[&str; 1]; 2]
+ 82..83 'b': [[&str; 1]; 2]
+ 95..96 'd': [isize; 4]
+ 99..111 '[y, 1, 2, 3]': [isize; 4]
+ 100..101 'y': isize
+ 103..104 '1': isize
+ 106..107 '2': isize
+ 109..110 '3': isize
+ 121..122 'd': [isize; 4]
+ 125..137 '[1, y, 2, 3]': [isize; 4]
+ 126..127 '1': isize
+ 129..130 'y': isize
+ 132..133 '2': isize
+ 135..136 '3': isize
+ 147..148 'e': [isize; 1]
+ 151..154 '[y]': [isize; 1]
+ 152..153 'y': isize
+ 164..165 'f': [[isize; 4]; 2]
+ 168..174 '[d, d]': [[isize; 4]; 2]
+ 169..170 'd': [isize; 4]
+ 172..173 'd': [isize; 4]
+ 184..185 'g': [[isize; 1]; 2]
+ 188..194 '[e, e]': [[isize; 1]; 2]
+ 189..190 'e': [isize; 1]
+ 192..193 'e': [isize; 1]
+ 205..206 'h': [i32; 2]
+ 209..215 '[1, 2]': [i32; 2]
+ 210..211 '1': i32
+ 213..214 '2': i32
+ 225..226 'i': [&str; 2]
+ 229..239 '["a", "b"]': [&str; 2]
+ 230..233 '"a"': &str
+ 235..238 '"b"': &str
+ 250..251 'b': [[&str; 1]; 2]
+ 254..264 '[a, ["b"]]': [[&str; 1]; 2]
+ 255..256 'a': [&str; 1]
+ 258..263 '["b"]': [&str; 1]
+ 259..262 '"b"': &str
+ 274..275 'x': [u8; 0]
+ 287..289 '[]': [u8; 0]
+ 299..300 'y': [u8; 4]
+ 314..323 '[1,2,3,4]': [u8; 4]
+ 315..316 '1': u8
+ 317..318 '2': u8
+ 319..320 '3': u8
+ 321..322 '4': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ fn test(a1: A<u32>, i: i32) {
+ a1.x;
+ let a2 = A { x: i };
+ a2.x;
+ let a3 = A::<i128> { x: 1 };
+ a3.x;
+ }
+ "#,
+ expect![[r#"
+ 35..37 'a1': A<u32>
+ 47..48 'i': i32
+ 55..146 '{ ...3.x; }': ()
+ 61..63 'a1': A<u32>
+ 61..65 'a1.x': u32
+ 75..77 'a2': A<i32>
+ 80..90 'A { x: i }': A<i32>
+ 87..88 'i': i32
+ 96..98 'a2': A<i32>
+ 96..100 'a2.x': i32
+ 110..112 'a3': A<i128>
+ 115..133 'A::<i1...x: 1 }': A<i128>
+ 130..131 '1': i128
+ 139..141 'a3': A<i128>
+ 139..143 'a3.x': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ A(42);
+ A(42u128);
+ Some("x");
+ Option::Some("x");
+ None;
+ let x: Option<i64> = None;
+ }
+ "#,
+ expect![[r#"
+ 75..183 '{ ...one; }': ()
+ 81..82 'A': A<i32>(i32) -> A<i32>
+ 81..86 'A(42)': A<i32>
+ 83..85 '42': i32
+ 92..93 'A': A<u128>(u128) -> A<u128>
+ 92..101 'A(42u128)': A<u128>
+ 94..100 '42u128': u128
+ 107..111 'Some': Some<&str>(&str) -> Option<&str>
+ 107..116 'Some("x")': Option<&str>
+ 112..115 '"x"': &str
+ 122..134 'Option::Some': Some<&str>(&str) -> Option<&str>
+ 122..139 'Option...e("x")': Option<&str>
+ 135..138 '"x"': &str
+ 145..149 'None': Option<{unknown}>
+ 159..160 'x': Option<i64>
+ 176..180 'None': Option<i64>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_function_generics() {
+ check_infer(
+ r#"
+ fn id<T>(t: T) -> T { t }
+
+ fn test() {
+ id(1u32);
+ id::<i128>(1);
+ let x: u64 = id(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 't': T
+ 20..25 '{ t }': T
+ 22..23 't': T
+ 37..97 '{ ...(1); }': ()
+ 43..45 'id': fn id<u32>(u32) -> u32
+ 43..51 'id(1u32)': u32
+ 46..50 '1u32': u32
+ 57..67 'id::<i128>': fn id<i128>(i128) -> i128
+ 57..70 'id::<i128>(1)': i128
+ 68..69 '1': i128
+ 80..81 'x': u64
+ 89..91 'id': fn id<u64>(u64) -> u64
+ 89..94 'id(1)': u64
+ 92..93 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_basic() {
+ check_infer(
+ r#"
+ struct A<T1, T2> {
+ x: T1,
+ y: T2,
+ }
+ impl<Y, X> A<X, Y> {
+ fn x(self) -> X {
+ self.x
+ }
+ fn y(self) -> Y {
+ self.y
+ }
+ fn z<T>(self, t: T) -> (X, Y, T) {
+ (self.x, self.y, t)
+ }
+ }
+
+ fn test() -> i128 {
+ let a = A { x: 1u64, y: 1i64 };
+ a.x();
+ a.y();
+ a.z(1i128);
+ a.z::<u128>(1);
+ }
+ "#,
+ expect![[r#"
+ 73..77 'self': A<X, Y>
+ 84..106 '{ ... }': X
+ 94..98 'self': A<X, Y>
+ 94..100 'self.x': X
+ 116..120 'self': A<X, Y>
+ 127..149 '{ ... }': Y
+ 137..141 'self': A<X, Y>
+ 137..143 'self.y': Y
+ 162..166 'self': A<X, Y>
+ 168..169 't': T
+ 187..222 '{ ... }': (X, Y, T)
+ 197..216 '(self.....y, t)': (X, Y, T)
+ 198..202 'self': A<X, Y>
+ 198..204 'self.x': X
+ 206..210 'self': A<X, Y>
+ 206..212 'self.y': Y
+ 214..215 't': T
+ 244..341 '{ ...(1); }': i128
+ 254..255 'a': A<u64, i64>
+ 258..280 'A { x:...1i64 }': A<u64, i64>
+ 265..269 '1u64': u64
+ 274..278 '1i64': i64
+ 286..287 'a': A<u64, i64>
+ 286..291 'a.x()': u64
+ 297..298 'a': A<u64, i64>
+ 297..302 'a.y()': i64
+ 308..309 'a': A<u64, i64>
+ 308..318 'a.z(1i128)': (u64, i64, i128)
+ 312..317 '1i128': i128
+ 324..325 'a': A<u64, i64>
+ 324..338 'a.z::<u128>(1)': (u64, i64, u128)
+ 336..337 '1': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_with_autoderef() {
+ check_infer(
+ r#"
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+ impl<T> Option<T> {
+ fn as_ref(&self) -> Option<&T> {}
+ }
+ fn test(o: Option<u32>) {
+ (&o).as_ref();
+ o.as_ref();
+ }
+ "#,
+ expect![[r#"
+ 77..81 'self': &Option<T>
+ 97..99 '{}': Option<&T>
+ 110..111 'o': Option<u32>
+ 126..164 '{ ...f(); }': ()
+ 132..145 '(&o).as_ref()': Option<&u32>
+ 133..135 '&o': &Option<u32>
+ 134..135 'o': Option<u32>
+ 151..152 'o': Option<u32>
+ 151..161 'o.as_ref()': Option<&u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generic_chain() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+ impl<T2> A<T2> {
+ fn x(self) -> T2 {
+ self.x
+ }
+ }
+ fn id<T>(t: T) -> T { t }
+
+ fn test() -> i128 {
+ let x = 1;
+ let y = id(x);
+ let a = A { x: id(y) };
+ let z = id(a.x);
+ let b = A { x: z };
+ b.x()
+ }
+ "#,
+ expect![[r#"
+ 52..56 'self': A<T2>
+ 64..86 '{ ... }': T2
+ 74..78 'self': A<T2>
+ 74..80 'self.x': T2
+ 98..99 't': T
+ 109..114 '{ t }': T
+ 111..112 't': T
+ 134..254 '{ ....x() }': i128
+ 144..145 'x': i128
+ 148..149 '1': i128
+ 159..160 'y': i128
+ 163..165 'id': fn id<i128>(i128) -> i128
+ 163..168 'id(x)': i128
+ 166..167 'x': i128
+ 178..179 'a': A<i128>
+ 182..196 'A { x: id(y) }': A<i128>
+ 189..191 'id': fn id<i128>(i128) -> i128
+ 189..194 'id(y)': i128
+ 192..193 'y': i128
+ 206..207 'z': i128
+ 210..212 'id': fn id<i128>(i128) -> i128
+ 210..217 'id(a.x)': i128
+ 213..214 'a': A<i128>
+ 213..216 'a.x': i128
+ 227..228 'b': A<i128>
+ 231..241 'A { x: z }': A<i128>
+ 238..239 'z': i128
+ 247..248 'b': A<i128>
+ 247..252 'b.x()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_const() {
+ check_infer(
+ r#"
+ struct Struct;
+
+ impl Struct {
+ const FOO: u32 = 1;
+ }
+
+ enum Enum {}
+
+ impl Enum {
+ const BAR: u32 = 2;
+ }
+
+ trait Trait {
+ const ID: u32;
+ }
+
+ struct TraitTest;
+
+ impl Trait for TraitTest {
+ const ID: u32 = 5;
+ }
+
+ fn test() {
+ let x = Struct::FOO;
+ let y = Enum::BAR;
+ let z = TraitTest::ID;
+ }
+ "#,
+ expect![[r#"
+ 51..52 '1': u32
+ 104..105 '2': u32
+ 212..213 '5': u32
+ 228..306 '{ ...:ID; }': ()
+ 238..239 'x': u32
+ 242..253 'Struct::FOO': u32
+ 263..264 'y': u32
+ 267..276 'Enum::BAR': u32
+ 286..287 'z': u32
+ 290..303 'TraitTest::ID': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_alias() {
+ check_infer(
+ r#"
+ struct A<X, Y> { x: X, y: Y }
+ type Foo = A<u32, i128>;
+ type Bar<T> = A<T, u128>;
+ type Baz<U, V> = A<V, U>;
+ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
+ x.x;
+ x.y;
+ y.x;
+ y.y;
+ z.x;
+ z.y;
+ }
+ mod m {
+ pub enum Enum {
+ Foo(u8),
+ }
+ pub type Alias = Enum;
+ }
+ fn f() {
+ let e = m::Alias::Foo(0);
+ let m::Alias::Foo(x) = &e;
+ }
+ "#,
+ expect![[r#"
+ 115..116 'x': A<u32, i128>
+ 123..124 'y': A<&str, u128>
+ 137..138 'z': A<u8, i8>
+ 153..210 '{ ...z.y; }': ()
+ 159..160 'x': A<u32, i128>
+ 159..162 'x.x': u32
+ 168..169 'x': A<u32, i128>
+ 168..171 'x.y': i128
+ 177..178 'y': A<&str, u128>
+ 177..180 'y.x': &str
+ 186..187 'y': A<&str, u128>
+ 186..189 'y.y': u128
+ 195..196 'z': A<u8, i8>
+ 195..198 'z.x': u8
+ 204..205 'z': A<u8, i8>
+ 204..207 'z.y': i8
+ 298..362 '{ ... &e; }': ()
+ 308..309 'e': Enum
+ 312..325 'm::Alias::Foo': Foo(u8) -> Enum
+ 312..328 'm::Ali...Foo(0)': Enum
+ 326..327 '0': u8
+ 338..354 'm::Ali...Foo(x)': Enum
+ 352..353 'x': &u8
+ 357..359 '&e': &Enum
+ 358..359 'e': Enum
+ "#]],
+ )
+}
+
+#[test]
+fn recursive_type_alias() {
+ check_infer(
+ r#"
+ struct A<X> {}
+ type Foo = Foo;
+ type Bar = A<Bar>;
+ fn test(x: Foo) {}
+ "#,
+ expect![[r#"
+ 58..59 'x': {unknown}
+ 66..68 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_type_param() {
+ check_infer(
+ r#"
+ fn id<T>(x: T) -> T {
+ x
+ }
+
+ fn clone<T>(x: &T) -> T {
+ *x
+ }
+
+ fn test() {
+ let y = 10u32;
+ id(y);
+ let x: bool = clone(z);
+ id::<i128>(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 'x': T
+ 20..29 '{ x }': T
+ 26..27 'x': T
+ 43..44 'x': &T
+ 55..65 '{ *x }': T
+ 61..63 '*x': T
+ 62..63 'x': &T
+ 77..157 '{ ...(1); }': ()
+ 87..88 'y': u32
+ 91..96 '10u32': u32
+ 102..104 'id': fn id<u32>(u32) -> u32
+ 102..107 'id(y)': u32
+ 105..106 'y': u32
+ 117..118 'x': bool
+ 127..132 'clone': fn clone<bool>(&bool) -> bool
+ 127..135 'clone(z)': bool
+ 133..134 'z': &bool
+ 141..151 'id::<i128>': fn id<i128>(i128) -> i128
+ 141..154 'id::<i128>(1)': i128
+ 152..153 '1': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const() {
+ check_infer(
+ r#"
+ struct Foo;
+ impl Foo { const ASSOC_CONST: u32 = 0; }
+ const GLOBAL_CONST: u32 = 101;
+ fn test() {
+ const LOCAL_CONST: u32 = 99;
+ let x = LOCAL_CONST;
+ let z = GLOBAL_CONST;
+ let id = Foo::ASSOC_CONST;
+ }
+ "#,
+ expect![[r#"
+ 48..49 '0': u32
+ 79..82 '101': u32
+ 94..212 '{ ...NST; }': ()
+ 137..138 'x': u32
+ 141..152 'LOCAL_CONST': u32
+ 162..163 'z': u32
+ 166..178 'GLOBAL_CONST': u32
+ 188..190 'id': u32
+ 193..209 'Foo::A..._CONST': u32
+ 125..127 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_static() {
+ check_infer(
+ r#"
+ static GLOBAL_STATIC: u32 = 101;
+ static mut GLOBAL_STATIC_MUT: u32 = 101;
+ fn test() {
+ static LOCAL_STATIC: u32 = 99;
+ static mut LOCAL_STATIC_MUT: u32 = 99;
+ let x = LOCAL_STATIC;
+ let y = LOCAL_STATIC_MUT;
+ let z = GLOBAL_STATIC;
+ let w = GLOBAL_STATIC_MUT;
+ }
+ "#,
+ expect![[r#"
+ 28..31 '101': u32
+ 69..72 '101': u32
+ 84..279 '{ ...MUT; }': ()
+ 172..173 'x': u32
+ 176..188 'LOCAL_STATIC': u32
+ 198..199 'y': u32
+ 202..218 'LOCAL_...IC_MUT': u32
+ 228..229 'z': u32
+ 232..245 'GLOBAL_STATIC': u32
+ 255..256 'w': u32
+ 259..276 'GLOBAL...IC_MUT': u32
+ 117..119 '99': u32
+ 160..162 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_primitive() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn const_eval_array_repeat_expr() {
+ check_types(
+ r#"
+fn main() {
+ const X: usize = 6 - 1;
+ let t = [(); X + 2];
+ //^ [(); 7]
+}"#,
+ );
+}
+
+#[test]
+fn shadowing_primitive_with_inner_items() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ fn inner() {}
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_primitive_by_module() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() {}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ foo();
+ //^^^^^ &str
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_module_by_primitive() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() -> u32 {0}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ str::foo();
+ //^^^^^^^^^^ u32
+}"#,
+ );
+}
+
+// This test is actually testing the shadowing behavior within hir_def. It
+// lives here because the testing infrastructure in hir_def isn't currently
+// capable of asserting the necessary conditions.
+#[test]
+fn should_be_shadowing_imports() {
+ check_types(
+ r#"
+mod a {
+ pub fn foo() -> i8 {0}
+ pub struct foo { a: i8 }
+}
+mod b { pub fn foo () -> u8 {0} }
+mod c { pub struct foo { a: u8 } }
+mod d {
+ pub use super::a::*;
+ pub use super::c::foo;
+ pub use super::b::foo;
+}
+
+fn main() {
+ d::foo();
+ //^^^^^^^^ u8
+ d::foo{a:0};
+ //^^^^^^^^^^^ foo
+}"#,
+ );
+}
+
+#[test]
+fn closure_return() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || -> usize { return 1; };
+ }
+ "#,
+ expect![[r#"
+ 16..58 '{ ...; }; }': u32
+ 26..27 'x': || -> usize
+ 30..55 '|| -> ...n 1; }': || -> usize
+ 42..55 '{ return 1; }': usize
+ 44..52 'return 1': !
+ 51..52 '1': usize
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_unit() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { return; };
+ }
+ "#,
+ expect![[r#"
+ 16..47 '{ ...; }; }': u32
+ 26..27 'x': || -> ()
+ 30..44 '|| { return; }': || -> ()
+ 33..44 '{ return; }': ()
+ 35..41 'return': !
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_inferred() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { "test" };
+ }
+ "#,
+ expect![[r#"
+ 16..46 '{ ..." }; }': u32
+ 26..27 'x': || -> &str
+ 30..43 '|| { "test" }': || -> &str
+ 33..43 '{ "test" }': &str
+ 35..41 '"test"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn fn_pointer_return() {
+ check_infer(
+ r#"
+ struct Vtable {
+ method: fn(),
+ }
+
+ fn main() {
+ let vtable = Vtable { method: || {} };
+ let m = vtable.method;
+ }
+ "#,
+ expect![[r#"
+ 47..120 '{ ...hod; }': ()
+ 57..63 'vtable': Vtable
+ 66..90 'Vtable...| {} }': Vtable
+ 83..88 '|| {}': || -> ()
+ 86..88 '{}': ()
+ 100..101 'm': fn()
+ 104..110 'vtable': Vtable
+ 104..117 'vtable.method': fn()
+ "#]],
+ );
+}
+
+#[test]
+fn block_modifiers_smoke_test() {
+ check_infer(
+ r#"
+//- minicore: future
+async fn main() {
+ let x = unsafe { 92 };
+ let y = async { async { () }.await };
+ let z = try { () };
+ let w = const { 92 };
+ let t = 'a: { 92 };
+}
+ "#,
+ expect![[r#"
+ 16..162 '{ ...2 }; }': ()
+ 26..27 'x': i32
+ 30..43 'unsafe { 92 }': i32
+ 30..43 'unsafe { 92 }': i32
+ 39..41 '92': i32
+ 53..54 'y': impl Future<Output = ()>
+ 57..85 'async ...wait }': ()
+ 57..85 'async ...wait }': impl Future<Output = ()>
+ 65..77 'async { () }': ()
+ 65..77 'async { () }': impl Future<Output = ()>
+ 65..83 'async ....await': ()
+ 73..75 '()': ()
+ 95..96 'z': {unknown}
+ 99..109 'try { () }': ()
+ 99..109 'try { () }': {unknown}
+ 105..107 '()': ()
+ 119..120 'w': i32
+ 123..135 'const { 92 }': i32
+ 123..135 'const { 92 }': i32
+ 131..133 '92': i32
+ 145..146 't': i32
+ 149..159 ''a: { 92 }': i32
+ 155..157 '92': i32
+ "#]],
+ )
+}
+#[test]
+fn async_block_early_return() {
+ check_infer(
+ r#"
+//- minicore: future, result, fn
+fn test<I, E, F: FnMut() -> Fut, Fut: core::future::Future<Output = Result<I, E>>>(f: F) {}
+
+fn main() {
+ async {
+ return Err(());
+ Ok(())
+ };
+ test(|| async {
+ return Err(());
+ Ok(())
+ });
+}
+ "#,
+ expect![[r#"
+ 83..84 'f': F
+ 89..91 '{}': ()
+ 103..231 '{ ... }); }': ()
+ 109..161 'async ... }': Result<(), ()>
+ 109..161 'async ... }': impl Future<Output = Result<(), ()>>
+ 125..139 'return Err(())': !
+ 132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 132..139 'Err(())': Result<(), ()>
+ 136..138 '()': ()
+ 149..151 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 149..155 'Ok(())': Result<(), ()>
+ 152..154 '()': ()
+ 167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
+ 167..228 'test(|... })': ()
+ 172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
+ 175..227 'async ... }': Result<(), ()>
+ 175..227 'async ... }': impl Future<Output = Result<(), ()>>
+ 191..205 'return Err(())': !
+ 198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 198..205 'Err(())': Result<(), ()>
+ 202..204 '()': ()
+ 215..217 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 215..221 'Ok(())': Result<(), ()>
+ 218..220 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_generic_from_later_assignment() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let mut end = None;
+ loop {
+ end = Some(true);
+ }
+ }
+ "#,
+ expect![[r#"
+ 59..129 '{ ... } }': ()
+ 69..76 'mut end': Option<bool>
+ 79..83 'None': Option<bool>
+ 89..127 'loop {... }': !
+ 94..127 '{ ... }': ()
+ 104..107 'end': Option<bool>
+ 104..120 'end = ...(true)': ()
+ 110..114 'Some': Some<bool>(bool) -> Option<bool>
+ 110..120 'Some(true)': Option<bool>
+ 115..119 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_with_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break None;
+ }
+
+ break Some(true);
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..168 '{ ... }; }': ()
+ 69..70 'x': Option<bool>
+ 73..165 'loop {... }': Option<bool>
+ 78..165 '{ ... }': ()
+ 88..132 'if fal... }': ()
+ 91..96 'false': bool
+ 97..132 '{ ... }': ()
+ 111..121 'break None': !
+ 117..121 'None': Option<bool>
+ 142..158 'break ...(true)': !
+ 148..152 'Some': Some<bool>(bool) -> Option<bool>
+ 148..158 'Some(true)': Option<bool>
+ 153..157 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_without_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break;
+ }
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..136 '{ ... }; }': ()
+ 69..70 'x': ()
+ 73..133 'loop {... }': ()
+ 78..133 '{ ... }': ()
+ 88..127 'if fal... }': ()
+ 91..96 'false': bool
+ 97..127 '{ ... }': ()
+ 111..116 'break': !
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_break_with_val() {
+ check_infer(
+ r#"
+ fn foo() {
+ let _x = || 'outer: loop {
+ let inner = 'inner: loop {
+ let i = Default::default();
+ if (break 'outer i) {
+ loop { break 'inner 5i8; };
+ } else if true {
+ break 'inner 6;
+ }
+ break 7;
+ };
+ break inner < 8;
+ };
+ }
+ "#,
+ expect![[r#"
+ 9..335 '{ ... }; }': ()
+ 19..21 '_x': || -> bool
+ 24..332 '|| 'ou... }': || -> bool
+ 27..332 ''outer... }': bool
+ 40..332 '{ ... }': ()
+ 54..59 'inner': i8
+ 62..300 ''inner... }': i8
+ 75..300 '{ ... }': ()
+ 93..94 'i': bool
+ 97..113 'Defaul...efault': {unknown}
+ 97..115 'Defaul...ault()': bool
+ 129..269 'if (br... }': ()
+ 133..147 'break 'outer i': !
+ 146..147 'i': bool
+ 149..208 '{ ... }': ()
+ 167..193 'loop {...5i8; }': !
+ 172..193 '{ brea...5i8; }': ()
+ 174..190 'break ...er 5i8': !
+ 187..190 '5i8': i8
+ 214..269 'if tru... }': ()
+ 217..221 'true': bool
+ 222..269 '{ ... }': ()
+ 240..254 'break 'inner 6': !
+ 253..254 '6': i8
+ 282..289 'break 7': !
+ 288..289 '7': i8
+ 310..325 'break inner < 8': !
+ 316..321 'inner': i8
+ 316..325 'inner < 8': bool
+ 324..325 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_block_break_with_val() {
+ check_infer(
+ r#"
+fn default<T>() -> T { loop {} }
+fn foo() {
+ let _x = 'outer: {
+ let inner = 'inner: {
+ let i = default();
+ if (break 'outer i) {
+ break 'inner 5i8;
+ } else if true {
+ break 'inner 6;
+ }
+ break 'inner 'innermost: { 0 };
+ 42
+ };
+ break 'outer inner < 8;
+ };
+}
+"#,
+ expect![[r#"
+ 21..32 '{ loop {} }': T
+ 23..30 'loop {}': !
+ 28..30 '{}': ()
+ 42..381 '{ ... }; }': ()
+ 52..54 '_x': bool
+ 57..378 ''outer... }': bool
+ 79..84 'inner': i8
+ 87..339 ''inner... }': i8
+ 113..114 'i': bool
+ 117..124 'default': fn default<bool>() -> bool
+ 117..126 'default()': bool
+ 140..270 'if (br... }': ()
+ 144..158 'break 'outer i': !
+ 157..158 'i': bool
+ 160..209 '{ ... }': ()
+ 178..194 'break ...er 5i8': !
+ 191..194 '5i8': i8
+ 215..270 'if tru... }': ()
+ 218..222 'true': bool
+ 223..270 '{ ... }': ()
+ 241..255 'break 'inner 6': !
+ 254..255 '6': i8
+ 283..313 'break ... { 0 }': !
+ 296..313 ''inner... { 0 }': i8
+ 310..311 '0': i8
+ 327..329 '42': i8
+ 349..371 'break ...er < 8': !
+ 362..367 'inner': i8
+ 362..371 'inner < 8': bool
+ 370..371 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test(t1: Thing, t2: OtherThing, t3: Thing<i32>, t4: OtherThing<i32>) {
+ t1.t;
+ t3.t;
+ match t2 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ match t4 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ }
+ "#,
+ expect![[r#"
+ 97..99 't1': Thing<()>
+ 108..110 't2': OtherThing<()>
+ 124..126 't3': Thing<i32>
+ 140..142 't4': OtherThing<i32>
+ 161..384 '{ ... } }': ()
+ 167..169 't1': Thing<()>
+ 167..171 't1.t': ()
+ 177..179 't3': Thing<i32>
+ 177..181 't3.t': i32
+ 187..282 'match ... }': ()
+ 193..195 't2': OtherThing<()>
+ 206..227 'OtherT... { t }': OtherThing<()>
+ 224..225 't': ()
+ 231..237 '{ t; }': ()
+ 233..234 't': ()
+ 247..265 'OtherT...Two(t)': OtherThing<()>
+ 263..264 't': ()
+ 269..275 '{ t; }': ()
+ 271..272 't': ()
+ 287..382 'match ... }': ()
+ 293..295 't4': OtherThing<i32>
+ 306..327 'OtherT... { t }': OtherThing<i32>
+ 324..325 't': i32
+ 331..337 '{ t; }': ()
+ 333..334 't': i32
+ 347..365 'OtherT...Two(t)': OtherThing<i32>
+ 363..364 't': i32
+ 369..375 '{ t; }': ()
+ 371..372 't': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_in_struct_literal() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test() {
+ let x = Thing { t: loop {} };
+ let y = Thing { t: () };
+ let z = Thing { t: 1i32 };
+ if let Thing { t } = z {
+ t;
+ }
+
+ let a = OtherThing::One { t: 1i32 };
+ let b = OtherThing::Two(1i32);
+ }
+ "#,
+ expect![[r#"
+ 99..319 '{ ...32); }': ()
+ 109..110 'x': Thing<!>
+ 113..133 'Thing ...p {} }': Thing<!>
+ 124..131 'loop {}': !
+ 129..131 '{}': ()
+ 143..144 'y': Thing<()>
+ 147..162 'Thing { t: () }': Thing<()>
+ 158..160 '()': ()
+ 172..173 'z': Thing<i32>
+ 176..193 'Thing ...1i32 }': Thing<i32>
+ 187..191 '1i32': i32
+ 199..240 'if let... }': ()
+ 202..221 'let Th... } = z': bool
+ 206..217 'Thing { t }': Thing<i32>
+ 214..215 't': i32
+ 220..221 'z': Thing<i32>
+ 222..240 '{ ... }': ()
+ 232..233 't': i32
+ 250..251 'a': OtherThing<i32>
+ 254..281 'OtherT...1i32 }': OtherThing<i32>
+ 275..279 '1i32': i32
+ 291..292 'b': OtherThing<i32>
+ 295..310 'OtherThing::Two': Two<i32>(i32) -> OtherThing<i32>
+ 295..316 'OtherT...(1i32)': OtherThing<i32>
+ 311..315 '1i32': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg() {
+ // FIXME: the {unknown} is a bug
+ check_infer(
+ r#"
+ struct Thing<T = u128, F = fn() -> T> { t: T }
+
+ fn test(t1: Thing<u32>, t2: Thing) {
+ t1;
+ t2;
+ Thing::<_> { t: 1u32 };
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<u32, fn() -> u32>
+ 72..74 't2': Thing<u128, fn() -> u128>
+ 83..130 '{ ...2 }; }': ()
+ 89..91 't1': Thing<u32, fn() -> u32>
+ 97..99 't2': Thing<u128, fn() -> u128>
+ 105..127 'Thing:...1u32 }': Thing<u32, fn() -> {unknown}>
+ 121..125 '1u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg_forward() {
+ // the {unknown} here is intentional, as defaults are not allowed to
+ // refer to type parameters coming later
+ check_infer(
+ r#"
+ struct Thing<F = fn() -> T, T = u128> { t: T }
+
+ fn test(t1: Thing) {
+ t1;
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<fn() -> {unknown}, u128>
+ 67..78 '{ t1; }': ()
+ 73..75 't1': Thing<fn() -> {unknown}, u128>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_operator_overload() {
+ check_types(
+ r#"
+//- minicore: add
+struct V2([f32; 2]);
+
+impl core::ops::Add<V2> for V2 {
+ type Output = V2;
+}
+
+fn test() {
+ let va = V2([0.0, 1.0]);
+ let vb = V2([0.0, 1.0]);
+
+ let r = va + vb;
+ // ^^^^^^^ V2
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn infer_const_params() {
+ check_infer(
+ r#"
+ fn foo<const FOO: usize>() {
+ let bar = FOO;
+ }
+ "#,
+ expect![[r#"
+ 27..49 '{ ...FOO; }': ()
+ 37..40 'bar': usize
+ 43..46 'FOO': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ struct S { field: u32 }
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ "#,
+ expect![[r#"
+ 9..89 '{ ...eld; }': ()
+ 47..48 's': S
+ 51..65 'S { field: 0 }': S
+ 62..63 '0': u32
+ 75..76 'f': u32
+ 79..80 's': S
+ 79..86 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ {
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ struct S { field: u32 }
+ }
+ "#,
+ expect![[r#"
+ 9..109 '{ ...32 } }': ()
+ 15..79 '{ ... }': ()
+ 29..30 's': S
+ 33..47 'S { field: 0 }': S
+ 44..45 '0': u32
+ 61..62 'f': u32
+ 65..66 's': S
+ 65..72 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_enum_rename() {
+ check_infer(
+ r#"
+ enum Request {
+ Info
+ }
+
+ fn f() {
+ use Request as R;
+
+ let r = R::Info;
+ match r {
+ R::Info => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 34..123 '{ ... } }': ()
+ 67..68 'r': Request
+ 71..78 'R::Info': Request
+ 84..121 'match ... }': ()
+ 90..91 'r': Request
+ 102..109 'R::Info': Request
+ 113..115 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn box_into_vec() {
+ check_infer(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+pub struct Vec<T, A: Allocator = Global> {}
+
+#[lang = "slice"]
+impl<T> [T] {}
+
+#[lang = "slice_alloc"]
+impl<T> [T] {
+ pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
+ unimplemented!()
+ }
+}
+
+fn test() {
+ let vec = <[_]>::into_vec(box [1i32]);
+ let v: Vec<Box<dyn B>> = <[_]> :: into_vec(box [box Astruct]);
+}
+
+trait B{}
+struct Astruct;
+impl B for Astruct {}
+"#,
+ expect![[r#"
+ 569..573 'self': Box<[T], A>
+ 602..634 '{ ... }': Vec<T, A>
+ 612..628 'unimpl...ted!()': Vec<T, A>
+ 648..761 '{ ...t]); }': ()
+ 658..661 'vec': Vec<i32, Global>
+ 664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 664..691 '<[_]>:...1i32])': Vec<i32, Global>
+ 680..690 'box [1i32]': Box<[i32; 1], Global>
+ 684..690 '[1i32]': [i32; 1]
+ 685..689 '1i32': i32
+ 701..702 'v': Vec<Box<dyn B, Global>, Global>
+ 722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
+ 722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
+ 740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
+ 744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
+ 745..756 'box Astruct': Box<Astruct, Global>
+ 749..756 'Astruct': Astruct
+ "#]],
+ )
+}
+
+#[test]
+fn cfgd_out_assoc_items() {
+ check_types(
+ r#"
+struct S;
+
+impl S {
+ #[cfg(FALSE)]
+ const C: S = S;
+}
+
+fn f() {
+ S::C;
+ //^^^^ {unknown}
+}
+ "#,
+ )
+}
+
+#[test]
+fn infer_missing_type() {
+ check_types(
+ r#"
+struct S;
+
+fn f() {
+ let s: = S;
+ //^ S
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_type_alias_variant() {
+ check_infer(
+ r#"
+type Qux = Foo;
+enum Foo {
+ Bar(i32),
+ Baz { baz: f32 }
+}
+
+fn f() {
+ match Foo::Bar(3) {
+ Qux::Bar(bar) => (),
+ Qux::Baz { baz } => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 72..166 '{ ... } }': ()
+ 78..164 'match ... }': ()
+ 84..92 'Foo::Bar': Bar(i32) -> Foo
+ 84..95 'Foo::Bar(3)': Foo
+ 93..94 '3': i32
+ 106..119 'Qux::Bar(bar)': Foo
+ 115..118 'bar': i32
+ 123..125 '()': ()
+ 135..151 'Qux::B... baz }': Foo
+ 146..149 'baz': f32
+ 155..157 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_boxed_self_receiver() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct Box<T>(T);
+
+impl<T> Deref for Box<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ fn get_inner<'a>(self: &'a Box<Self>) -> &'a T {}
+
+ fn get_self<'a>(self: &'a Box<Self>) -> &'a Self {}
+
+ fn into_inner(self: Box<Self>) -> Self {}
+}
+
+fn main() {
+ let boxed = Box(Foo(0_i32));
+
+ let bad1 = boxed.get_inner();
+ let good1 = Foo::get_inner(&boxed);
+
+ let bad2 = boxed.get_self();
+ let good2 = Foo::get_self(&boxed);
+
+ let inner = boxed.into_inner();
+}
+ "#,
+ expect![[r#"
+ 104..108 'self': &Box<T>
+ 188..192 'self': &Box<Foo<T>>
+ 218..220 '{}': &T
+ 242..246 'self': &Box<Foo<T>>
+ 275..277 '{}': &Foo<T>
+ 297..301 'self': Box<Foo<T>>
+ 322..324 '{}': Foo<T>
+ 338..559 '{ ...r(); }': ()
+ 348..353 'boxed': Box<Foo<i32>>
+ 356..359 'Box': Box<Foo<i32>>(Foo<i32>) -> Box<Foo<i32>>
+ 356..371 'Box(Foo(0_i32))': Box<Foo<i32>>
+ 360..363 'Foo': Foo<i32>(i32) -> Foo<i32>
+ 360..370 'Foo(0_i32)': Foo<i32>
+ 364..369 '0_i32': i32
+ 382..386 'bad1': &i32
+ 389..394 'boxed': Box<Foo<i32>>
+ 389..406 'boxed....nner()': &i32
+ 416..421 'good1': &i32
+ 424..438 'Foo::get_inner': fn get_inner<i32>(&Box<Foo<i32>>) -> &i32
+ 424..446 'Foo::g...boxed)': &i32
+ 439..445 '&boxed': &Box<Foo<i32>>
+ 440..445 'boxed': Box<Foo<i32>>
+ 457..461 'bad2': &Foo<i32>
+ 464..469 'boxed': Box<Foo<i32>>
+ 464..480 'boxed....self()': &Foo<i32>
+ 490..495 'good2': &Foo<i32>
+ 498..511 'Foo::get_self': fn get_self<i32>(&Box<Foo<i32>>) -> &Foo<i32>
+ 498..519 'Foo::g...boxed)': &Foo<i32>
+ 512..518 '&boxed': &Box<Foo<i32>>
+ 513..518 'boxed': Box<Foo<i32>>
+ 530..535 'inner': Foo<i32>
+ 538..543 'boxed': Box<Foo<i32>>
+ 538..556 'boxed....nner()': Foo<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_2015() {
+ check_types(
+ r#"
+//- /main.rs edition:2015 crate:main deps:core
+fn f() {
+ Rust;
+ //^^^^ Rust
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2015 {
+ pub struct Rust;
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn legacy_const_generics() {
+ check_no_mismatches(
+ r#"
+#[rustc_legacy_const_generics(1, 3)]
+fn mixed<const N1: &'static str, const N2: bool>(
+ a: u8,
+ b: i8,
+) {}
+
+fn f() {
+ mixed(0, "", -1, true);
+ mixed::<"", true>(0, -1);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_slice() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^usize
+ [a,] = [0usize];
+
+ let a;
+ //^usize
+ [a, ..] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a, _] = [0usize; 5];
+
+ let a;
+ //^usize
+ [_, a, ..] = [0usize; 5];
+
+ let a: &mut i64 = &mut 0;
+ [*a, ..] = [1, 2, 3];
+
+ let a: usize;
+ let b;
+ //^usize
+ [a, _, b] = [3, 4, 5];
+ //^usize
+
+ let a;
+ //^i64
+ let b;
+ //^i64
+ [[a, ..], .., [.., b]] = [[1, 2], [3i64, 4], [5, 6], [7, 8]];
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, b) = ('c', 0i64);
+
+ let a;
+ //^char
+ (a, ..) = ('c', 0i64);
+
+ let a;
+ //^i64
+ (.., a) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, .., b) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ (a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ (_, a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ (_, a, .., b) = ('c', 0i64, true, 0usize);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b: i64 = 0;
+ (a, b) = (b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple_struct() {
+ check_types(
+ r#"
+struct S2(char, i64);
+struct S3(char, i64, bool);
+struct S4(char, i64, bool usize);
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, .., b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ S3(a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ S3(_, a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ S4(_, a, .., b) = S4('c', 0i64, true, 0usize);
+
+ struct Swap(i64, i64);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b = 0;
+ //^^^^^i64
+ Swap(a, b) = Swap(b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_struct() {
+ check_types(
+ r#"
+struct S {
+ a: usize,
+ b: char,
+}
+struct T {
+ s: S,
+ t: i64,
+}
+
+fn main() {
+ let a;
+ //^usize
+ let c;
+ //^char
+ S { a, b: c } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, .. } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, _ } = S { a: 3, b: 'b' };
+
+ let a;
+ //^usize
+ let c;
+ //^char
+ let t;
+ //^i64
+ T { s: S { a, b: c }, t } = T { s: S { a: 3, b: 'b' }, t: 0 };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_nested() {
+ check_types(
+ r#"
+struct S {
+ a: TS,
+ b: [char; 3],
+}
+struct TS(usize, i64);
+
+fn main() {
+ let a;
+ //^i32
+ let b;
+ //^bool
+ ([.., a], .., b, _) = ([0, 1, 2], true, 'c');
+
+ let a;
+ //^i32
+ let b;
+ //^i32
+ [(.., a, _), .., (b, ..)] = [(1, 2); 5];
+
+ let a;
+ //^usize
+ let b;
+ //^char
+ S { a: TS(a, ..), b: [_, b, ..] } = S { a: TS(0, 0), b: ['a'; 3] };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_unit_struct() {
+ // taken from rustc; see https://github.com/rust-lang/rust/pull/95380
+ check_no_mismatches(
+ r#"
+struct S;
+enum E { V, }
+type A = E;
+
+fn main() {
+ let mut a;
+
+ (S, a) = (S, ());
+
+ (E::V, a) = (E::V, ());
+
+ (<E>::V, a) = (E::V, ());
+ (A::V, a) = (E::V, ());
+}
+
+impl S {
+ fn check() {
+ let a;
+ (Self, a) = (S, ());
+ }
+}
+
+impl E {
+ fn check() {
+ let a;
+ (Self::V, a) = (E::V, ());
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_no_default_binding_mode() {
+ check(
+ r#"
+struct S { a: usize }
+struct TS(usize);
+fn main() {
+ let x;
+ [x,] = &[1,];
+ //^^^^expected &[i32; 1], got [{unknown}; _]
+
+ // FIXME we only want the outermost error, but this matches the current
+ // behavior of slice patterns
+ let x;
+ [(x,),] = &[(1,),];
+ // ^^^^expected {unknown}, got ({unknown},)
+ //^^^^^^^expected &[(i32,); 1], got [{unknown}; _]
+
+ let x;
+ ((x,),) = &((1,),);
+ //^^^^^^^expected &((i32,),), got (({unknown},),)
+
+ let x;
+ (x,) = &(1,);
+ //^^^^expected &(i32,), got ({unknown},)
+
+ let x;
+ (S { a: x },) = &(S { a: 42 },);
+ //^^^^^^^^^^^^^expected &(S,), got (S,)
+
+ let x;
+ S { a: x } = &S { a: 42 };
+ //^^^^^^^^^^expected &S, got S
+
+ let x;
+ TS(x) = &TS(42);
+ //^^^^^expected &TS, got TS
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_type_mismatch_on_identifier() {
+ check(
+ r#"
+struct S { v: i64 }
+struct TS(i64);
+fn main() {
+ let mut a: usize = 0;
+ (a,) = (0i64,);
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ [a,] = [0i64,];
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ S { v: a } = S { v: 0 };
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ TS(a) = TS(0);
+ //^expected i64, got usize
+}
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
new file mode 100644
index 000000000..75802a5eb
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -0,0 +1,3782 @@
+use cov_mark::check;
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn infer_await() {
+ check_types(
+ r#"
+//- minicore: future
+struct IntFuture;
+
+impl core::future::Future for IntFuture {
+ type Output = u64;
+}
+
+fn test() {
+ let r = IntFuture;
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_async() {
+ check_types(
+ r#"
+//- minicore: future
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_desugar_async() {
+ check_types(
+ r#"
+//- minicore: future, sized
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ r;
+} //^ impl Future<Output = u64>
+"#,
+ );
+}
+
+#[test]
+fn infer_async_block() {
+ check_types(
+ r#"
+//- minicore: future, option
+async fn test() {
+ let a = async { 42 };
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let b = async {}.await;
+ b;
+// ^ ()
+ let c = async {
+ let y = None;
+ y
+ // ^ Option<u64>
+ };
+ let _: Option<u64> = c.await;
+ c;
+// ^ impl Future<Output = Option<u64>>
+}
+"#,
+ );
+}
+
+#[test]
+fn auto_sized_async_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: future, sized
+
+use core::future::Future;
+struct MyFut<Fut>(Fut);
+
+impl<Fut> Future for MyFut<Fut>
+where Fut: Future
+{
+ type Output = Fut::Output;
+}
+async fn reproduction() -> usize {
+ let f = async {999usize};
+ MyFut(f).await
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+//- minicore: future
+//#11815
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{
+ box async move {}
+}
+
+fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
+ box async move {}
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_try() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub trait Try {
+ type Ok;
+ type Error;
+ }
+}
+
+pub mod result {
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> crate::ops::Try for Result<O, E> {
+ type Ok = O;
+ type Error = E;
+ }
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::{result::*, ops::*};
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_try_trait_v2() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+mod ops {
+ mod try_trait {
+ pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+ }
+ pub trait FromResidual<R = <Self as Try>::Residual> {}
+ }
+
+ pub use self::try_trait::FromResidual;
+ pub use self::try_trait::Try;
+}
+
+mod convert {
+ pub trait From<T> {}
+ impl<T> From<T> for T {}
+}
+
+pub mod result {
+ use crate::convert::From;
+ use crate::ops::{Try, FromResidual};
+
+ pub enum Infallible {}
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> Try for Result<O, E> {
+ type Output = O;
+ type Error = Result<Infallible, E>;
+ }
+
+ impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::result::*;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_for_loop() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core,alloc
+#![no_std]
+use alloc::collections::Vec;
+
+fn test() {
+ let v = Vec::new();
+ v.push("foo");
+ for x in v {
+ x;
+ } //^ &str
+}
+
+//- /core.rs crate:core
+pub mod iter {
+ pub trait IntoIterator {
+ type Item;
+ }
+}
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::iter::*;
+ }
+}
+
+//- /alloc.rs crate:alloc deps:core
+#![no_std]
+pub mod collections {
+ pub struct Vec<T> {}
+ impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) { }
+ }
+
+ impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_neg() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Neg for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = -a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "neg"]
+ pub trait Neg {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_not() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Not for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = !a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "not"]
+ pub trait Not {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_1() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<T: Trait<u32>>(t: T) {}
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_2() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<U, T: Trait<U>>(t: T) -> U { loop {} }
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ let x: u32 = foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_trait() {
+ cov_mark::check!(trait_self_implements_self);
+ check(
+ r#"
+trait Trait {
+ fn foo(&self) -> i64;
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_super_trait() {
+ check(
+ r#"
+trait SuperTrait {
+ fn foo(&self) -> i64;
+}
+trait Trait: SuperTrait {
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_project_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn test<T: Iterable>() {
+ let x: <S as Iterable>::Item = 1;
+ // ^ u32
+ let y: <T as Iterable>::Item = u;
+ // ^ Iterable::Item<T>
+ let z: T::Item = u;
+ // ^ Iterable::Item<T>
+ let a: <T>::Item = u;
+ // ^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn infer_return_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn foo1<T: Iterable>(t: T) -> T::Item { loop {} }
+fn foo2<T: Iterable>(t: T) -> <T as Iterable>::Item { loop {} }
+fn foo3<T: Iterable>(t: T) -> <T>::Item { loop {} }
+fn test() {
+ foo1(S);
+ // ^^^^^^^ u32
+ foo2(S);
+ // ^^^^^^^ u32
+ foo3(S);
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_method_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S<T>;
+impl<T> S<T> {
+ fn foo(self) -> T::Item where T: Iterable { loop {} }
+}
+fn test<T: Iterable>() {
+ let s: S<T>;
+ s.foo();
+ // ^^^^^^^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_self_issue_12484() {
+ check_types(
+ r#"
+trait Bar {
+ type A;
+}
+trait Foo {
+ type A;
+ fn test(a: Self::A, _: impl Bar) {
+ a;
+ //^ Foo::A<Self>
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_associated_type_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+fn test<T: Iterable<Item=u32>>() {
+ let y: T::Item = unknown;
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn infer_const_body() {
+ // FIXME make check_types work with other bodies
+ check_infer(
+ r#"
+const A: u32 = 1 + 1;
+static B: u64 = { let x = 1; x };
+"#,
+ expect![[r#"
+ 15..16 '1': u32
+ 15..20 '1 + 1': u32
+ 19..20 '1': u32
+ 38..54 '{ let ...1; x }': u64
+ 44..45 'x': u64
+ 48..49 '1': u64
+ 51..52 'x': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_fields() {
+ check_infer(
+ r#"
+struct S(i32, u64);
+fn test() -> u64 {
+ let a = S(4, 6);
+ let b = a.0;
+ a.1
+}"#,
+ expect![[r#"
+ 37..86 '{ ... a.1 }': u64
+ 47..48 'a': S
+ 51..52 'S': S(i32, u64) -> S
+ 51..58 'S(4, 6)': S
+ 53..54 '4': i32
+ 56..57 '6': u64
+ 68..69 'b': i32
+ 72..73 'a': S
+ 72..75 'a.0': i32
+ 81..82 'a': S
+ 81..84 'a.1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_with_fn() {
+ check_infer(
+ r#"
+struct S(fn(u32) -> u64);
+fn test() -> u64 {
+ let a = S(|i| 2*i);
+ let b = a.0(4);
+ a.0(2)
+}"#,
+ expect![[r#"
+ 43..101 '{ ...0(2) }': u64
+ 53..54 'a': S
+ 57..58 'S': S(fn(u32) -> u64) -> S
+ 57..67 'S(|i| 2*i)': S
+ 59..66 '|i| 2*i': |u32| -> u64
+ 60..61 'i': u32
+ 63..64 '2': u32
+ 63..66 '2*i': u32
+ 65..66 'i': u32
+ 77..78 'b': u64
+ 81..82 'a': S
+ 81..84 'a.0': fn(u32) -> u64
+ 81..87 'a.0(4)': u64
+ 85..86 '4': u32
+ 93..94 'a': S
+ 93..96 'a.0': fn(u32) -> u64
+ 93..99 'a.0(2)': u64
+ 97..98 '2': u32
+ "#]],
+ );
+}
+
+#[test]
+fn indexing_arrays() {
+ check_infer(
+ "fn main() { &mut [9][2]; }",
+ expect![[r#"
+ 10..26 '{ &mut...[2]; }': ()
+ 12..23 '&mut [9][2]': &mut {unknown}
+ 17..20 '[9]': [i32; 1]
+ 17..23 '[9][2]': {unknown}
+ 18..19 '9': i32
+ 21..22 '2': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_ops_index() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32];
+ b;
+} //^ Foo
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+ field: u32;
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32].field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field_autoderef() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+ field: u32;
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = (&a[1u32]).field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_int() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+struct Range;
+impl core::ops::Index<Range> for Bar {
+ type Output = Bar;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1];
+ b;
+ //^ Foo
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_autoderef() {
+ check_types(
+ r#"
+//- minicore: index, slice
+fn test() {
+ let a = &[1u32, 2, 3];
+ let b = a[1];
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn deref_trait() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+fn new_arc<T: ?Sized>() -> Arc<T> { Arc }
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+fn foo(a: Arc<S>) {}
+
+fn test() {
+ let a = new_arc();
+ let b = *a;
+ //^^ S
+ foo(a);
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_infinite_recursion() {
+ check_types(
+ r#"
+//- minicore: deref
+struct S;
+
+impl core::ops::Deref for S {
+ type Target = S;
+}
+
+fn test(s: S) {
+ s.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_question_mark_size() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_implicit_sized_requirement_on_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Foo<T>;
+impl<T> core::ops::Deref for Foo<T> {
+ type Target = ();
+}
+fn test() {
+ let foo = Foo;
+ *foo;
+ //^^^^ ()
+ let _: Foo<u8> = foo;
+}
+"#,
+ )
+}
+
+#[test]
+fn obligation_from_function_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<u32> for S {}
+
+fn foo<T: Trait<U>, U>(t: T) -> U { loop {} }
+
+fn test(s: S) {
+ foo(s);
+} //^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_method_clause() {
+ check_types(
+ r#"
+//- /main.rs
+struct S;
+
+trait Trait<T> {}
+impl Trait<isize> for S {}
+
+struct O;
+impl O {
+ fn foo<T: Trait<U>, U>(&self, t: T) -> U { loop {} }
+}
+
+fn test() {
+ O.foo(S);
+} //^^^^^^^^ isize
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_self_method_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<i64> for S {}
+
+impl S {
+ fn foo<U>(&self) -> U where Self: Trait<U> { loop {} }
+}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ i64
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_impl_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<&str> for S {}
+
+struct O<T>;
+impl<U, T: Trait<U>> O<T> {
+ fn foo(&self) -> U { loop {} }
+}
+
+fn test(o: O<S>) {
+ o.foo();
+} //^^^^^^^ &str
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T: Clone>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1_not_met() {
+ check_types(
+ r#"
+//- /main.rs
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T: Trait>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2_not_met() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+trait Trait {}
+impl<T> core::ops::Deref for T where T: Trait {
+ type Target = i128;
+}
+fn test<T: Trait>(t: T) { *t; }
+ //^^ i128
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder() {
+ // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+
+pub struct RefMutL<T>;
+
+impl<T> ApplyL for RefMutL<T> {
+ type Out = <T as ApplyL>::Out;
+}
+
+fn test<T: ApplyL>() {
+ let y: <RefMutL<T> as ApplyL>::Out = no_matter;
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder_2() {
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
+
+fn test<T: ApplyL>(t: T) {
+ let y = foo(t);
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn argument_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar(x: impl Trait<u16>) {}
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
+ x;
+ y;
+ let z = S(1);
+ bar(z);
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 77..78 'x': impl Trait<u16>
+ 97..99 '{}': ()
+ 154..155 'x': impl Trait<u64>
+ 174..175 'y': &impl Trait<u32>
+ 195..323 '{ ...2(); }': ()
+ 201..202 'x': impl Trait<u64>
+ 208..209 'y': &impl Trait<u32>
+ 219..220 'z': S<u16>
+ 223..224 'S': S<u16>(u16) -> S<u16>
+ 223..227 'S(1)': S<u16>
+ 225..226 '1': u16
+ 233..236 'bar': fn bar(S<u16>)
+ 233..239 'bar(z)': ()
+ 237..238 'z': S<u16>
+ 245..246 'x': impl Trait<u64>
+ 245..252 'x.foo()': u64
+ 258..259 'y': &impl Trait<u32>
+ 258..265 'y.foo()': u32
+ 271..272 'z': S<u16>
+ 271..278 'z.foo()': u16
+ 284..285 'x': impl Trait<u64>
+ 284..292 'x.foo2()': i64
+ 298..299 'y': &impl Trait<u32>
+ 298..306 'y.foo2()': i64
+ 312..313 'z': S<u16>
+ 312..320 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+trait Foo {
+ // this function has an implicit Self param, an explicit type param,
+ // and an implicit impl Trait param!
+ fn bar<T>(x: impl Trait) -> T { loop {} }
+}
+fn foo<T>(x: impl Trait) -> T { loop {} }
+struct S;
+impl Trait for S {}
+struct F;
+impl Foo for F {}
+
+fn test() {
+ Foo::bar(S);
+ <F as Foo>::bar(S);
+ F::bar(S);
+ Foo::bar::<u32>(S);
+ <F as Foo>::bar::<u32>(S);
+
+ foo(S);
+ foo::<u32>(S);
+ foo::<u32, i32>(S); // we should ignore the extraneous i32
+}"#,
+ expect![[r#"
+ 155..156 'x': impl Trait
+ 175..186 '{ loop {} }': T
+ 177..184 'loop {}': !
+ 182..184 '{}': ()
+ 199..200 'x': impl Trait
+ 219..230 '{ loop {} }': T
+ 221..228 'loop {}': !
+ 226..228 '{}': ()
+ 300..509 '{ ... i32 }': ()
+ 306..314 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
+ 306..317 'Foo::bar(S)': {unknown}
+ 315..316 'S': S
+ 323..338 '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 323..341 '<F as ...bar(S)': {unknown}
+ 339..340 'S': S
+ 347..353 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 347..356 'F::bar(S)': {unknown}
+ 354..355 'S': S
+ 362..377 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
+ 362..380 'Foo::b...32>(S)': u32
+ 378..379 'S': S
+ 386..408 '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
+ 386..411 '<F as ...32>(S)': u32
+ 409..410 'S': S
+ 418..421 'foo': fn foo<{unknown}>(S) -> {unknown}
+ 418..424 'foo(S)': {unknown}
+ 422..423 'S': S
+ 430..440 'foo::<u32>': fn foo<u32>(S) -> u32
+ 430..443 'foo::<u32>(S)': u32
+ 441..442 'S': S
+ 449..464 'foo::<u32, i32>': fn foo<u32>(S) -> u32
+ 449..467 'foo::<...32>(S)': u32
+ 465..466 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_2() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct S;
+impl Trait for S {}
+struct F<T>;
+impl<T> F<T> {
+ fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
+}
+
+fn test() {
+ F.foo(S);
+ F::<u32>.foo(S);
+ F::<u32>.foo::<i32>(S);
+ F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
+}"#,
+ expect![[r#"
+ 87..91 'self': F<T>
+ 93..94 'x': impl Trait
+ 118..129 '{ loop {} }': (T, U)
+ 120..127 'loop {}': !
+ 125..127 '{}': ()
+ 143..283 '{ ...ored }': ()
+ 149..150 'F': F<{unknown}>
+ 149..157 'F.foo(S)': ({unknown}, {unknown})
+ 155..156 'S': S
+ 163..171 'F::<u32>': F<u32>
+ 163..178 'F::<u32>.foo(S)': (u32, {unknown})
+ 176..177 'S': S
+ 184..192 'F::<u32>': F<u32>
+ 184..206 'F::<u3...32>(S)': (u32, i32)
+ 204..205 'S': S
+ 212..220 'F::<u32>': F<u32>
+ 212..239 'F::<u3...32>(S)': (u32, i32)
+ 237..238 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_to_fn_pointer() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo(x: impl Trait) { loop {} }
+struct S;
+impl Trait for S {}
+
+fn test() {
+ let f: fn(S) -> () = foo;
+}"#,
+ expect![[r#"
+ 22..23 'x': impl Trait
+ 37..48 '{ loop {} }': ()
+ 39..46 'loop {}': !
+ 44..46 '{}': ()
+ 90..123 '{ ...foo; }': ()
+ 100..101 'f': fn(S)
+ 117..120 'foo': fn foo(S)
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> impl Trait<u64> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 98..100 '{}': ()
+ 110..111 'x': impl Trait<u64>
+ 130..131 'y': &impl Trait<u64>
+ 151..268 '{ ...2(); }': ()
+ 157..158 'x': impl Trait<u64>
+ 164..165 'y': &impl Trait<u64>
+ 175..176 'z': impl Trait<u64>
+ 179..182 'bar': fn bar() -> impl Trait<u64>
+ 179..184 'bar()': impl Trait<u64>
+ 190..191 'x': impl Trait<u64>
+ 190..197 'x.foo()': u64
+ 203..204 'y': &impl Trait<u64>
+ 203..210 'y.foo()': u64
+ 216..217 'z': impl Trait<u64>
+ 216..223 'z.foo()': u64
+ 229..230 'x': impl Trait<u64>
+ 229..237 'x.foo2()': i64
+ 243..244 'y': &impl Trait<u64>
+ 243..251 'y.foo2()': i64
+ 257..258 'z': impl Trait<u64>
+ 257..265 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn simple_return_pos_impl_trait() {
+ cov_mark::check!(lower_rpit);
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> impl Trait<u64> { loop {} }
+
+fn test() {
+ let a = bar();
+ a.foo();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 71..82 '{ loop {} }': !
+ 73..80 'loop {}': !
+ 78..80 '{}': ()
+ 94..129 '{ ...o(); }': ()
+ 104..105 'a': impl Trait<u64>
+ 108..111 'bar': fn bar() -> impl Trait<u64>
+ 108..113 'bar()': impl Trait<u64>
+ 119..120 'a': impl Trait<u64>
+ 119..126 'a.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn more_return_pos_impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Iterator {
+ type Item;
+ fn next(&mut self) -> Self::Item;
+}
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>) { loop {} }
+fn baz<T>(t: T) -> (impl Iterator<Item = impl Trait<T>>, impl Trait<T>) { loop {} }
+
+fn test() {
+ let (a, b) = bar();
+ a.next().foo();
+ b.foo();
+ let (c, d) = baz(1u128);
+ c.next().foo();
+ d.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &mut Self
+ 101..105 'self': &Self
+ 184..195 '{ loop {} }': ({unknown}, {unknown})
+ 186..193 'loop {}': !
+ 191..193 '{}': ()
+ 206..207 't': T
+ 268..279 '{ loop {} }': ({unknown}, {unknown})
+ 270..277 'loop {}': !
+ 275..277 '{}': ()
+ 291..413 '{ ...o(); }': ()
+ 301..307 '(a, b)': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 302..303 'a': impl Iterator<Item = impl Trait<u32>>
+ 305..306 'b': impl Trait<u64>
+ 310..313 'bar': fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 310..315 'bar()': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 321..322 'a': impl Iterator<Item = impl Trait<u32>>
+ 321..329 'a.next()': impl Trait<u32>
+ 321..335 'a.next().foo()': u32
+ 341..342 'b': impl Trait<u64>
+ 341..348 'b.foo()': u64
+ 358..364 '(c, d)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 359..360 'c': impl Iterator<Item = impl Trait<u128>>
+ 362..363 'd': impl Trait<u128>
+ 367..370 'baz': fn baz<u128>(u128) -> (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 367..377 'baz(1u128)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 371..376 '1u128': u128
+ 383..384 'c': impl Iterator<Item = impl Trait<u128>>
+ 383..391 'c.next()': impl Trait<u128>
+ 383..397 'c.next().foo()': u128
+ 403..404 'd': impl Trait<u128>
+ 403..410 'd.foo()': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_from_return_pos_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, sized
+trait Trait<T> {}
+struct Bar<T>(T);
+impl<T> Trait<T> for Bar<T> {}
+fn foo<const C: u8, T>() -> (impl FnOnce(&str, T), impl Trait<u8>) {
+ (|input, t| {}, Bar(C))
+}
+"#,
+ expect![[r#"
+ 134..165 '{ ...(C)) }': (|&str, T| -> (), Bar<u8>)
+ 140..163 '(|inpu...ar(C))': (|&str, T| -> (), Bar<u8>)
+ 141..154 '|input, t| {}': |&str, T| -> ()
+ 142..147 'input': &str
+ 149..150 't': T
+ 152..154 '{}': ()
+ 156..159 'Bar': Bar<u8>(u8) -> Bar<u8>
+ 156..162 'Bar(C)': Bar<u8>
+ 160..161 'C': u8
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> dyn Trait<u64> {}
+
+fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 97..99 '{}': dyn Trait<u64>
+ 109..110 'x': dyn Trait<u64>
+ 128..129 'y': &dyn Trait<u64>
+ 148..265 '{ ...2(); }': ()
+ 154..155 'x': dyn Trait<u64>
+ 161..162 'y': &dyn Trait<u64>
+ 172..173 'z': dyn Trait<u64>
+ 176..179 'bar': fn bar() -> dyn Trait<u64>
+ 176..181 'bar()': dyn Trait<u64>
+ 187..188 'x': dyn Trait<u64>
+ 187..194 'x.foo()': u64
+ 200..201 'y': &dyn Trait<u64>
+ 200..207 'y.foo()': u64
+ 213..214 'z': dyn Trait<u64>
+ 213..220 'z.foo()': u64
+ 226..227 'x': dyn Trait<u64>
+ 226..234 'x.foo2()': i64
+ 240..241 'y': &dyn Trait<u64>
+ 240..248 'y.foo2()': i64
+ 254..255 'z': dyn Trait<u64>
+ 254..262 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_in_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T, U> {
+ fn foo(&self) -> (T, U);
+}
+struct S<T, U> {}
+impl<T, U> S<T, U> {
+ fn bar(&self) -> &dyn Trait<T, U> { loop {} }
+}
+trait Trait2<T, U> {
+ fn baz(&self) -> (T, U);
+}
+impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
+
+fn test(s: S<u32, i32>) {
+ s.bar().baz();
+}"#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 102..106 'self': &S<T, U>
+ 128..139 '{ loop {} }': &dyn Trait<T, U>
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 175..179 'self': &Self
+ 251..252 's': S<u32, i32>
+ 267..289 '{ ...z(); }': ()
+ 273..274 's': S<u32, i32>
+ 273..280 's.bar()': &dyn Trait<u32, i32>
+ 273..286 's.bar().baz()': (u32, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_bare() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ fn foo(&self) -> u64;
+}
+fn bar() -> Trait {}
+
+fn test(x: Trait, y: &Trait) -> u64 {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+}"#,
+ expect![[r#"
+ 26..30 'self': &Self
+ 60..62 '{}': dyn Trait
+ 72..73 'x': dyn Trait
+ 82..83 'y': &dyn Trait
+ 100..175 '{ ...o(); }': u64
+ 106..107 'x': dyn Trait
+ 113..114 'y': &dyn Trait
+ 124..125 'z': dyn Trait
+ 128..131 'bar': fn bar() -> dyn Trait
+ 128..133 'bar()': dyn Trait
+ 139..140 'x': dyn Trait
+ 139..146 'x.foo()': u64
+ 152..153 'y': &dyn Trait
+ 152..159 'y.foo()': u64
+ 165..166 'z': dyn Trait
+ 165..172 'z.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn weird_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn test(
+ a: impl Trait + 'lifetime,
+ b: impl 'lifetime,
+ c: impl (Trait),
+ d: impl ('lifetime),
+ e: impl ?Sized,
+ f: impl Trait + ?Sized
+) {}
+"#,
+ expect![[r#"
+ 28..29 'a': impl Trait
+ 59..60 'b': impl Sized
+ 82..83 'c': impl Trait
+ 103..104 'd': impl Sized
+ 128..129 'e': impl ?Sized
+ 148..149 'f': impl Trait + ?Sized
+ 173..175 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn error_bound_chalk() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self) -> u32 { 0 }
+}
+
+fn test(x: (impl Trait + UnknownTrait)) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn assoc_type_bindings() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ type Type;
+}
+
+fn get<T: Trait>(t: T) -> <T as Trait>::Type {}
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> Trait for S<T> { type Type = T; }
+
+fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
+ get(x);
+ get2(x);
+ get(y);
+ get2(y);
+ get(set(S));
+ get2(set(S));
+ get2(S::<str>);
+}"#,
+ expect![[r#"
+ 49..50 't': T
+ 77..79 '{}': Trait::Type<T>
+ 111..112 't': T
+ 122..124 '{}': U
+ 154..155 't': T
+ 165..168 '{t}': T
+ 166..167 't': T
+ 256..257 'x': T
+ 262..263 'y': impl Trait<Type = i64>
+ 289..397 '{ ...r>); }': ()
+ 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
+ 295..301 'get(x)': u32
+ 299..300 'x': T
+ 307..311 'get2': fn get2<u32, T>(T) -> u32
+ 307..314 'get2(x)': u32
+ 312..313 'x': T
+ 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
+ 320..326 'get(y)': i64
+ 324..325 'y': impl Trait<Type = i64>
+ 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ 332..339 'get2(y)': i64
+ 337..338 'y': impl Trait<Type = i64>
+ 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
+ 345..356 'get(set(S))': u64
+ 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 349..355 'set(S)': S<u64>
+ 353..354 'S': S<u64>
+ 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 362..374 'get2(set(S))': u64
+ 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 367..373 'set(S)': S<u64>
+ 371..372 'S': S<u64>
+ 380..384 'get2': fn get2<str, S<str>>(S<str>) -> str
+ 380..394 'get2(S::<str>)': str
+ 385..393 'S::<str>': S<str>
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait_assoc_binding_projection_bug() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub trait Language {
+ type Kind;
+}
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+}
+struct SyntaxNode<L> {}
+fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
+
+trait Clone {
+ fn clone(&self) -> Self;
+}
+
+fn api_walkthrough() {
+ for node in foo() {
+ node.clone();
+ } //^^^^^^^^^^^^ {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn projection_eq_within_chalk() {
+ check_infer(
+ r#"
+trait Trait1 {
+ type Type;
+}
+trait Trait2<T> {
+ fn foo(self) -> T;
+}
+impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {}
+
+fn test<T: Trait1<Type = u32>>(x: T) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 163..164 'x': T
+ 169..185 '{ ...o(); }': ()
+ 175..176 'x': T
+ 175..182 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn where_clause_trait_in_scope_for_method_resolution() {
+ check_types(
+ r#"
+mod foo {
+ trait Trait {
+ fn foo(&self) -> u32 { 0 }
+ }
+}
+
+fn test<T: foo::Trait>(x: T) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn super_trait_method_resolution() {
+ check_infer(
+ r#"
+mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+trait Trait2 where Self: foo::SuperTrait {}
+
+fn test<T: Trait1, U: Trait2>(x: T, y: U) {
+ x.foo();
+ y.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': u32
+ 181..182 'x': T
+ 187..188 'y': U
+ 193..222 '{ ...o(); }': ()
+ 199..200 'x': T
+ 199..206 'x.foo()': u32
+ 212..213 'y': U
+ 212..219 'y.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_impl_trait_method_resolution() {
+ check_infer(
+ r#"
+//- minicore: sized
+mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+
+fn test(x: &impl Trait1) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': u32
+ 115..116 'x': &impl Trait1
+ 132..148 '{ ...o(); }': ()
+ 138..139 'x': &impl Trait1
+ 138..145 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_cycle() {
+ // This just needs to not crash
+ check_infer(
+ r#"
+ trait A: B {}
+ trait B: A {}
+
+ fn test<T: A>(x: T) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 43..44 'x': T
+ 49..65 '{ ...o(); }': ()
+ 55..56 'x': T
+ 55..62 'x.foo()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_assoc_type_bounds() {
+ check_infer(
+ r#"
+trait SuperTrait { type Type; }
+trait Trait where Self: SuperTrait {}
+
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> SuperTrait for S<T> { type Type = T; }
+impl<T> Trait for S<T> {}
+
+fn test() {
+ get2(set(S));
+}"#,
+ expect![[r#"
+ 102..103 't': T
+ 113..115 '{}': U
+ 145..146 't': T
+ 156..159 '{t}': T
+ 157..158 't': T
+ 258..279 '{ ...S)); }': ()
+ 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 264..276 'get2(set(S))': u64
+ 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 269..275 'set(S)': S<u64>
+ 273..274 'S': S<u64>
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait() {
+ check_infer_with_mismatches(
+ r#"
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
+}
+
+fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
+ f.call_once((1, 2));
+}"#,
+ expect![[r#"
+ 56..60 'self': Self
+ 62..66 'args': Args
+ 149..150 'f': F
+ 155..183 '{ ...2)); }': ()
+ 161..162 'f': F
+ 161..180 'f.call...1, 2))': u128
+ 173..179 '(1, 2)': (u32, u64)
+ 174..175 '1': u32
+ 177..178 '2': u64
+ "#]],
+ );
+}
+
+#[test]
+fn fn_ptr_and_item() {
+ check_infer_with_mismatches(
+ r#"
+#[lang="fn_once"]
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> Self::Output;
+}
+
+trait Foo<T> {
+ fn foo(&self) -> T;
+}
+
+struct Bar<T>(T);
+
+impl<A1, R, F: FnOnce(A1) -> R> Foo<(A1, R)> for Bar<F> {
+ fn foo(&self) -> (A1, R) { loop {} }
+}
+
+enum Opt<T> { None, Some(T) }
+impl<T> Opt<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Opt<U> { loop {} }
+}
+
+fn test() {
+ let bar: Bar<fn(u8) -> u32>;
+ bar.foo();
+
+ let opt: Opt<u8>;
+ let f: fn(u8) -> u32;
+ opt.map(f);
+}"#,
+ expect![[r#"
+ 74..78 'self': Self
+ 80..84 'args': Args
+ 139..143 'self': &Self
+ 243..247 'self': &Bar<F>
+ 260..271 '{ loop {} }': (A1, R)
+ 262..269 'loop {}': !
+ 267..269 '{}': ()
+ 355..359 'self': Opt<T>
+ 361..362 'f': F
+ 377..388 '{ loop {} }': Opt<U>
+ 379..386 'loop {}': !
+ 384..386 '{}': ()
+ 402..518 '{ ...(f); }': ()
+ 412..415 'bar': Bar<fn(u8) -> u32>
+ 441..444 'bar': Bar<fn(u8) -> u32>
+ 441..450 'bar.foo()': (u8, u32)
+ 461..464 'opt': Opt<u8>
+ 483..484 'f': fn(u8) -> u32
+ 505..508 'opt': Opt<u8>
+ 505..515 'opt.map(f)': Opt<u32>
+ 513..514 'f': fn(u8) -> u32
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait_deref_with_ty_default() {
+ check_infer(
+ r#"
+//- minicore: deref, fn
+struct Foo;
+
+impl Foo {
+ fn foo(&self) -> usize {}
+}
+
+struct Lazy<T, F = fn() -> T>(F);
+
+impl<T, F> Lazy<T, F> {
+ pub fn new(f: F) -> Lazy<T, F> {}
+}
+
+impl<T, F: FnOnce() -> T> core::ops::Deref for Lazy<T, F> {
+ type Target = T;
+}
+
+fn test() {
+ let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
+ let r1 = lazy1.foo();
+
+ fn make_foo_fn() -> Foo {}
+ let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
+ let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
+ let r2 = lazy2.foo();
+}"#,
+ expect![[r#"
+ 36..40 'self': &Foo
+ 51..53 '{}': usize
+ 131..132 'f': F
+ 151..153 '{}': Lazy<T, F>
+ 251..497 '{ ...o(); }': ()
+ 261..266 'lazy1': Lazy<Foo, || -> Foo>
+ 283..292 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
+ 283..300 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
+ 293..299 '|| Foo': || -> Foo
+ 296..299 'Foo': Foo
+ 310..312 'r1': usize
+ 315..320 'lazy1': Lazy<Foo, || -> Foo>
+ 315..326 'lazy1.foo()': usize
+ 368..383 'make_foo_fn_ptr': fn() -> Foo
+ 399..410 'make_foo_fn': fn make_foo_fn() -> Foo
+ 420..425 'lazy2': Lazy<Foo, fn() -> Foo>
+ 442..451 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
+ 442..468 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
+ 452..467 'make_foo_fn_ptr': fn() -> Foo
+ 478..480 'r2': usize
+ 483..488 'lazy2': Lazy<Foo, fn() -> Foo>
+ 483..494 'lazy2.foo()': usize
+ 357..359 '{}': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn closure_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+enum Option<T> { Some(T), None }
+impl<T> Option<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> { loop {} }
+}
+
+fn test() {
+ let x = Option::Some(1u32);
+ x.map(|v| v + 1);
+ x.map(|_v| 1u64);
+ let y: Option<i64> = x.map(|_v| 1);
+}"#,
+ expect![[r#"
+ 86..90 'self': Option<T>
+ 92..93 'f': F
+ 111..122 '{ loop {} }': Option<U>
+ 113..120 'loop {}': !
+ 118..120 '{}': ()
+ 136..255 '{ ... 1); }': ()
+ 146..147 'x': Option<u32>
+ 150..162 'Option::Some': Some<u32>(u32) -> Option<u32>
+ 150..168 'Option...(1u32)': Option<u32>
+ 163..167 '1u32': u32
+ 174..175 'x': Option<u32>
+ 174..190 'x.map(...v + 1)': Option<u32>
+ 180..189 '|v| v + 1': |u32| -> u32
+ 181..182 'v': u32
+ 184..185 'v': u32
+ 184..189 'v + 1': u32
+ 188..189 '1': u32
+ 196..197 'x': Option<u32>
+ 196..212 'x.map(... 1u64)': Option<u64>
+ 202..211 '|_v| 1u64': |u32| -> u64
+ 203..205 '_v': u32
+ 207..211 '1u64': u64
+ 222..223 'y': Option<i64>
+ 239..240 'x': Option<u32>
+ 239..252 'x.map(|_v| 1)': Option<i64>
+ 245..251 '|_v| 1': |u32| -> i64
+ 246..248 '_v': u32
+ 250..251 '1': i64
+ "#]],
+ );
+}
+
+#[test]
+fn closure_2() {
+ check_types(
+ r#"
+//- minicore: add, fn
+
+impl core::ops::Add for u64 {
+ type Output = Self;
+ fn add(self, rhs: u64) -> Self::Output {0}
+}
+
+impl core::ops::Add for u128 {
+ type Output = Self;
+ fn add(self, rhs: u128) -> Self::Output {0}
+}
+
+fn test<F: FnOnce(u32) -> u64>(f: F) {
+ f(1);
+ // ^ u32
+ //^^^^ u64
+ let g = |v| v + 1;
+ //^^^^^ u64
+ //^^^^^^^^^ |u64| -> u64
+ g(1u64);
+ //^^^^^^^ u64
+ let h = |v| 1u128 + v;
+ //^^^^^^^^^^^^^ |u128| -> u128
+}"#,
+ );
+}
+
+#[test]
+fn closure_as_argument_inference_order() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn foo1<T, U, F: FnOnce(T) -> U>(x: T, f: F) -> U { loop {} }
+fn foo2<T, U, F: FnOnce(T) -> U>(f: F, x: T) -> U { loop {} }
+
+struct S;
+impl S {
+ fn method(self) -> u64;
+
+ fn foo1<T, U, F: FnOnce(T) -> U>(self, x: T, f: F) -> U { loop {} }
+ fn foo2<T, U, F: FnOnce(T) -> U>(self, f: F, x: T) -> U { loop {} }
+}
+
+fn test() {
+ let x1 = foo1(S, |s| s.method());
+ let x2 = foo2(|s| s.method(), S);
+ let x3 = S.foo1(S, |s| s.method());
+ let x4 = S.foo2(|s| s.method(), S);
+}"#,
+ expect![[r#"
+ 33..34 'x': T
+ 39..40 'f': F
+ 50..61 '{ loop {} }': U
+ 52..59 'loop {}': !
+ 57..59 '{}': ()
+ 95..96 'f': F
+ 101..102 'x': T
+ 112..123 '{ loop {} }': U
+ 114..121 'loop {}': !
+ 119..121 '{}': ()
+ 158..162 'self': S
+ 210..214 'self': S
+ 216..217 'x': T
+ 222..223 'f': F
+ 233..244 '{ loop {} }': U
+ 235..242 'loop {}': !
+ 240..242 '{}': ()
+ 282..286 'self': S
+ 288..289 'f': F
+ 294..295 'x': T
+ 305..316 '{ loop {} }': U
+ 307..314 'loop {}': !
+ 312..314 '{}': ()
+ 330..489 '{ ... S); }': ()
+ 340..342 'x1': u64
+ 345..349 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
+ 345..368 'foo1(S...hod())': u64
+ 350..351 'S': S
+ 353..367 '|s| s.method()': |S| -> u64
+ 354..355 's': S
+ 357..358 's': S
+ 357..367 's.method()': u64
+ 378..380 'x2': u64
+ 383..387 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
+ 383..406 'foo2(|...(), S)': u64
+ 388..402 '|s| s.method()': |S| -> u64
+ 389..390 's': S
+ 392..393 's': S
+ 392..402 's.method()': u64
+ 404..405 'S': S
+ 416..418 'x3': u64
+ 421..422 'S': S
+ 421..446 'S.foo1...hod())': u64
+ 428..429 'S': S
+ 431..445 '|s| s.method()': |S| -> u64
+ 432..433 's': S
+ 435..436 's': S
+ 435..445 's.method()': u64
+ 456..458 'x4': u64
+ 461..462 'S': S
+ 461..486 'S.foo2...(), S)': u64
+ 468..482 '|s| s.method()': |S| -> u64
+ 469..470 's': S
+ 472..473 's': S
+ 472..482 's.method()': u64
+ 484..485 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn fn_item_fn_trait() {
+ check_types(
+ r#"
+//- minicore: fn
+struct S;
+
+fn foo() -> S { S }
+
+fn takes_closure<U, F: FnOnce() -> U>(f: F) -> U { f() }
+
+fn test() {
+ takes_closure(foo);
+} //^^^^^^^^^^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_1() {
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T: Trait>() where T::Item: Trait2 {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_2() {
+ check_types(
+ r#"
+trait Trait<T> {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_on_impl_self() {
+ check_infer(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+
+ fn f(&self, x: Self::Item);
+}
+
+struct S;
+
+impl Trait for S {
+ type Item = u32;
+ fn f(&self, x: Self::Item) { let y = x; }
+}
+
+struct S2;
+
+impl Trait for S2 {
+ type Item = i32;
+ fn f(&self, x: <Self>::Item) { let y = x; }
+}"#,
+ expect![[r#"
+ 40..44 'self': &Self
+ 46..47 'x': Trait::Item<Self>
+ 126..130 'self': &S
+ 132..133 'x': u32
+ 147..161 '{ let y = x; }': ()
+ 153..154 'y': u32
+ 157..158 'x': u32
+ 228..232 'self': &S2
+ 234..235 'x': i32
+ 251..265 '{ let y = x; }': ()
+ 257..258 'y': i32
+ 261..262 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn unselected_projection_on_trait_self() {
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+
+ fn f(&self) -> Self::Item { loop {} }
+}
+
+struct S;
+impl Trait for S {
+ type Item = u32;
+}
+
+fn test() {
+ S.f();
+} //^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_chalk_fold() {
+ check_types(
+ r#"
+trait Interner {}
+trait Fold<I: Interner, TI = I> {
+ type Result;
+}
+
+struct Ty<I: Interner> {}
+impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
+ type Result = Ty<TI>;
+}
+
+fn fold<I: Interner, T>(interner: &I, t: T) -> T::Result
+where
+ T: Fold<I, I>,
+{
+ loop {}
+}
+
+fn foo<I: Interner>(interner: &I, t: Ty<I>) {
+ fold(interner, t);
+} //^^^^^^^^^^^^^^^^^ Ty<I>
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty() {
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self);
+}
+
+struct S;
+
+impl Trait<Self> for S {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty_cycle() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self);
+}
+
+struct S<T>;
+
+impl Trait for S<Self> {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_1() {
+ // This is not a cycle, because the `T: Trait2<T::Item>` bound depends only on the `T: Trait`
+ // bound, not on itself (since only `Trait` can define `Item`).
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+}
+
+trait Trait2<T> {}
+
+fn test<T: Trait>() where T: Trait2<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_2() {
+ // this is a legitimate cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Trait<T> {
+ type Item;
+}
+
+fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_3() {
+ // this is a cycle for rustc; we currently accept it
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+ type OtherItem;
+}
+
+fn test<T>() where T: Trait<OtherItem = T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_no_cycle() {
+ // this is not a cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Index {
+ type Output;
+}
+
+type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+}
+
+pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+}
+
+fn test<T>(t: T) where T: UnificationStoreMut {
+ let x;
+ t.push(x);
+ let y: Key<T>;
+ (x, y);
+} //^^^^^^ (UnificationStoreBase::Key<T>, UnificationStoreBase::Key<T>)
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_1() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// workaround for Chalk assoc type normalization problems
+pub struct S<T>;
+impl<T: Iterator> Iterator for S<T> {
+ type Item = <T as Iterator>::Item;
+}
+
+fn test<I: Iterator<Item: OtherTrait<u32>>>() {
+ let x: <S<I> as Iterator>::Item;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_2() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+
+fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
+ let x: <<I as Iterator>::Item as Iterator>::Item;
+ x;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn proc_macro_server_types() {
+ check_infer(
+ r#"
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ TokenStream {
+ fn new() -> $S::TokenStream;
+ },
+ Group {
+ },
+ }
+ };
+}
+macro_rules! associated_item {
+ (type TokenStream) =>
+ (type TokenStream: 'static;);
+ (type Group) =>
+ (type Group: 'static;);
+ ($($item:tt)*) => ($($item)*;)
+}
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method($($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+
+with_api!(Self, self_, declare_server_traits);
+struct G {}
+struct T {}
+struct RustAnalyzer;
+impl Types for RustAnalyzer {
+ type TokenStream = T;
+ type Group = G;
+}
+
+fn make<T>() -> T { loop {} }
+impl TokenStream for RustAnalyzer {
+ fn new() -> Self::TokenStream {
+ let group: Self::Group = make();
+ make()
+ }
+}"#,
+ expect![[r#"
+ 1075..1086 '{ loop {} }': T
+ 1077..1084 'loop {}': !
+ 1082..1084 '{}': ()
+ 1157..1220 '{ ... }': T
+ 1171..1176 'group': G
+ 1192..1196 'make': fn make<G>() -> G
+ 1192..1198 'make()': G
+ 1208..1212 'make': fn make<T>() -> T
+ 1208..1214 'make()': T
+ "#]],
+ );
+}
+
+#[test]
+fn unify_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {}
+
+fn foo(x: impl Trait<u32>) { loop {} }
+fn bar<T>(x: impl Trait<T>) -> T { loop {} }
+
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn default<T>() -> T { loop {} }
+
+fn test() -> impl Trait<i32> {
+ let s1 = S(default());
+ foo(s1);
+ let x: i32 = bar(S(default()));
+ S(default())
+}"#,
+ expect![[r#"
+ 26..27 'x': impl Trait<u32>
+ 46..57 '{ loop {} }': ()
+ 48..55 'loop {}': !
+ 53..55 '{}': ()
+ 68..69 'x': impl Trait<T>
+ 91..102 '{ loop {} }': T
+ 93..100 'loop {}': !
+ 98..100 '{}': ()
+ 171..182 '{ loop {} }': T
+ 173..180 'loop {}': !
+ 178..180 '{}': ()
+ 213..309 '{ ...t()) }': S<i32>
+ 223..225 's1': S<u32>
+ 228..229 'S': S<u32>(u32) -> S<u32>
+ 228..240 'S(default())': S<u32>
+ 230..237 'default': fn default<u32>() -> u32
+ 230..239 'default()': u32
+ 246..249 'foo': fn foo(S<u32>)
+ 246..253 'foo(s1)': ()
+ 250..252 's1': S<u32>
+ 263..264 'x': i32
+ 272..275 'bar': fn bar<i32>(S<i32>) -> i32
+ 272..289 'bar(S(...lt()))': i32
+ 276..277 'S': S<i32>(i32) -> S<i32>
+ 276..288 'S(default())': S<i32>
+ 278..285 'default': fn default<i32>() -> i32
+ 278..287 'default()': i32
+ 295..296 'S': S<i32>(i32) -> S<i32>
+ 295..307 'S(default())': S<i32>
+ 297..304 'default': fn default<i32>() -> i32
+ 297..306 'default()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn assoc_types_from_bounds() {
+ check_infer(
+ r#"
+//- minicore: fn
+trait T {
+ type O;
+}
+
+impl T for () {
+ type O = ();
+}
+
+fn f<X, F>(_v: F)
+where
+ X: T,
+ F: FnOnce(&X::O),
+{ }
+
+fn main() {
+ f::<(), _>(|z| { z; });
+}"#,
+ expect![[r#"
+ 72..74 '_v': F
+ 117..120 '{ }': ()
+ 132..163 '{ ... }); }': ()
+ 138..148 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
+ 138..160 'f::<()... z; })': ()
+ 149..159 '|z| { z; }': |&()| -> ()
+ 150..151 'z': &()
+ 153..159 '{ z; }': ()
+ 155..156 'z': &()
+ "#]],
+ );
+}
+
+#[test]
+fn associated_type_bound() {
+ check_types(
+ r#"
+pub trait Trait {
+ type Item: OtherTrait<u32>;
+}
+pub trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// this is just a workaround for chalk#234
+pub struct S<T>;
+impl<T: Trait> Trait for S<T> {
+ type Item = <T as Trait>::Item;
+}
+
+fn test<T: Trait>() {
+ let y: <S<T> as Trait>::Item = no_matter;
+ y.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_through_chalk() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+}
+trait Trait {
+ fn foo(&self);
+}
+
+fn test(x: Box<dyn Trait>) {
+ x.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn string_to_owned() {
+ check_types(
+ r#"
+struct String {}
+pub trait ToOwned {
+ type Owned;
+ fn to_owned(&self) -> Self::Owned;
+}
+impl ToOwned for str {
+ type Owned = String;
+}
+fn test() {
+ "foo".to_owned();
+} //^^^^^^^^^^^^^^^^ String
+"#,
+ );
+}
+
+#[test]
+fn iterator_chain() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+pub trait Iterator {
+ type Item;
+
+ fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ { loop {} }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ { loop {} }
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ fn into_iter(self) -> Self::IntoIter;
+}
+
+pub struct FilterMap<I, F> { }
+impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+where
+ F: FnMut(I::Item) -> Option<B>,
+{
+ type Item = B;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+
+ fn into_iter(self) -> I {
+ self
+ }
+}
+
+struct Vec<T> {}
+impl<T> Vec<T> {
+ fn new() -> Self { loop {} }
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+}
+
+pub struct IntoIter<T> { }
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+}
+
+fn main() {
+ Vec::<i32>::new().into_iter()
+ .filter_map(|x| if x > 0 { Some(x as u32) } else { None })
+ .for_each(|y| { y; });
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 67..68 'f': F
+ 152..163 '{ loop {} }': FilterMap<Self, F>
+ 154..161 'loop {}': !
+ 159..161 '{}': ()
+ 184..188 'self': Self
+ 190..191 'f': F
+ 240..251 '{ loop {} }': ()
+ 242..249 'loop {}': !
+ 247..249 '{}': ()
+ 360..364 'self': Self
+ 689..693 'self': I
+ 700..720 '{ ... }': I
+ 710..714 'self': I
+ 779..790 '{ loop {} }': Vec<T>
+ 781..788 'loop {}': !
+ 786..788 '{}': ()
+ 977..1104 '{ ... }); }': ()
+ 983..998 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
+ 983..1000 'Vec::<...:new()': Vec<i32>
+ 983..1012 'Vec::<...iter()': IntoIter<i32>
+ 983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
+ 983..1101 'Vec::<... y; })': ()
+ 1029..1074 '|x| if...None }': |i32| -> Option<u32>
+ 1030..1031 'x': i32
+ 1033..1074 'if x >...None }': Option<u32>
+ 1036..1037 'x': i32
+ 1036..1041 'x > 0': bool
+ 1040..1041 '0': i32
+ 1042..1060 '{ Some...u32) }': Option<u32>
+ 1044..1048 'Some': Some<u32>(u32) -> Option<u32>
+ 1044..1058 'Some(x as u32)': Option<u32>
+ 1049..1050 'x': i32
+ 1049..1057 'x as u32': u32
+ 1066..1074 '{ None }': Option<u32>
+ 1068..1072 'None': Option<u32>
+ 1090..1100 '|y| { y; }': |u32| -> ()
+ 1091..1092 'y': u32
+ 1094..1100 '{ y; }': ()
+ 1096..1097 'y': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_assoc() {
+ check_types(
+ r#"
+struct Bar;
+struct Foo;
+
+trait A {
+ type OutputA;
+}
+
+impl A for Bar {
+ type OutputA = Foo;
+}
+
+trait B {
+ type Output;
+ fn foo() -> Self::Output;
+}
+
+impl<T:A> B for T {
+ type Output = T::OutputA;
+ fn foo() -> Self::Output { loop {} }
+}
+
+fn main() {
+ Bar::foo();
+} //^^^^^^^^^^ Foo
+"#,
+ );
+}
+
+#[test]
+fn trait_object_no_coercion() {
+ check_infer_with_mismatches(
+ r#"
+trait Foo {}
+
+fn foo(x: &dyn Foo) {}
+
+fn test(x: &dyn Foo) {
+ foo(x);
+}"#,
+ expect![[r#"
+ 21..22 'x': &dyn Foo
+ 34..36 '{}': ()
+ 46..47 'x': &dyn Foo
+ 59..74 '{ foo(x); }': ()
+ 65..68 'foo': fn foo(&dyn Foo)
+ 65..71 'foo(x)': ()
+ 69..70 'x': &dyn Foo
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+struct IsCopy;
+impl Copy for IsCopy {}
+struct NotCopy;
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ IsCopy.test();
+ NotCopy.test();
+ (IsCopy, IsCopy).test();
+ (IsCopy, NotCopy).test();
+}"#,
+ expect![[r#"
+ 78..82 'self': &Self
+ 134..235 '{ ...t(); }': ()
+ 140..146 'IsCopy': IsCopy
+ 140..153 'IsCopy.test()': bool
+ 159..166 'NotCopy': NotCopy
+ 159..173 'NotCopy.test()': {unknown}
+ 179..195 '(IsCop...sCopy)': (IsCopy, IsCopy)
+ 179..202 '(IsCop...test()': bool
+ 180..186 'IsCopy': IsCopy
+ 188..194 'IsCopy': IsCopy
+ 208..225 '(IsCop...tCopy)': (IsCopy, NotCopy)
+ 208..232 '(IsCop...test()': {unknown}
+ 209..215 'IsCopy': IsCopy
+ 217..224 'NotCopy': NotCopy
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_def_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+fn foo() {}
+fn bar<T: Copy>(T) -> T {}
+struct Struct(usize);
+enum Enum { Variant(usize) }
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ foo.test();
+ bar.test();
+ Struct.test();
+ Enum::Variant.test();
+}"#,
+ expect![[r#"
+ 9..11 '{}': ()
+ 28..29 'T': {unknown}
+ 36..38 '{}': T
+ 36..38: expected T, got ()
+ 113..117 'self': &Self
+ 169..249 '{ ...t(); }': ()
+ 175..178 'foo': fn foo()
+ 175..185 'foo.test()': bool
+ 191..194 'bar': fn bar<{unknown}>({unknown}) -> {unknown}
+ 191..201 'bar.test()': bool
+ 207..213 'Struct': Struct(usize) -> Struct
+ 207..220 'Struct.test()': bool
+ 226..239 'Enum::Variant': Variant(usize) -> Enum
+ 226..246 'Enum::...test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_ptr_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test(f1: fn(), f2: fn(usize) -> u8, f3: fn(u8, u8) -> &u8) {
+ f1.test();
+ f2.test();
+ f3.test();
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 76..78 'f1': fn()
+ 86..88 'f2': fn(usize) -> u8
+ 107..109 'f3': fn(u8, u8) -> &u8
+ 130..178 '{ ...t(); }': ()
+ 136..138 'f1': fn()
+ 136..145 'f1.test()': bool
+ 151..153 'f2': fn(usize) -> u8
+ 151..160 'f2.test()': bool
+ 166..168 'f3': fn(u8, u8) -> &u8
+ 166..175 'f3.test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_sized() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Test { fn test(&self) -> bool; }
+impl<T: Sized> Test for T {}
+
+fn test() {
+ 1u8.test();
+ (*"foo").test(); // not Sized
+ (1u8, 1u8).test();
+ (1u8, *"foo").test(); // not Sized
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 79..194 '{ ...ized }': ()
+ 85..88 '1u8': u8
+ 85..95 '1u8.test()': bool
+ 101..116 '(*"foo").test()': {unknown}
+ 102..108 '*"foo"': str
+ 103..108 '"foo"': &str
+ 135..145 '(1u8, 1u8)': (u8, u8)
+ 135..152 '(1u8, ...test()': bool
+ 136..139 '1u8': u8
+ 141..144 '1u8': u8
+ 158..171 '(1u8, *"foo")': (u8, str)
+ 158..178 '(1u8, ...test()': {unknown}
+ 159..162 '1u8': u8
+ 164..170 '*"foo"': str
+ 165..170 '"foo"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn integer_range_iterate() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ for x in 0..100 { x; }
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+}
+
+pub mod iter {
+ pub trait Iterator {
+ type Item;
+ }
+
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ }
+
+ impl<T> IntoIterator for T where T: Iterator {
+ type Item = <T as Iterator>::Item;
+ type IntoIter = Self;
+ }
+}
+
+trait Step {}
+impl Step for i32 {}
+impl Step for i64 {}
+
+impl<A: Step> iter::Iterator for ops::Range<A> {
+ type Item = A;
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_closure_arg() {
+ check_infer(
+ r#"
+//- /lib.rs
+
+enum Option<T> {
+ None,
+ Some(T)
+}
+
+fn foo() {
+ let s = Option::None;
+ let f = |x: Option<i32>| {};
+ (&f)(s)
+}"#,
+ expect![[r#"
+ 52..126 '{ ...)(s) }': ()
+ 62..63 's': Option<i32>
+ 66..78 'Option::None': Option<i32>
+ 88..89 'f': |Option<i32>| -> ()
+ 92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
+ 93..94 'x': Option<i32>
+ 109..111 '{}': ()
+ 117..124 '(&f)(s)': ()
+ 118..120 '&f': &|Option<i32>| -> ()
+ 119..120 'f': |Option<i32>| -> ()
+ 122..123 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_fn_param_informs_call_site_closure_signature() {
+ cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature);
+ check_types(
+ r#"
+//- minicore: fn, coerce_unsized
+struct S;
+impl S {
+ fn inherent(&self) -> u8 { 0 }
+}
+fn take_dyn_fn(f: &dyn Fn(S)) {}
+
+fn f() {
+ take_dyn_fn(&|x| { x.inherent(); });
+ //^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_fn_trait_arg() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+fn foo<F, T>(f: F) -> T
+where
+ F: Fn(Option<i32>) -> T,
+{
+ let s = None;
+ f(s)
+}
+"#,
+ expect![[r#"
+ 13..14 'f': F
+ 59..89 '{ ...f(s) }': T
+ 69..70 's': Option<i32>
+ 73..77 'None': Option<i32>
+ 83..84 'f': F
+ 83..87 'f(s)': T
+ 85..86 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_box_fn_arg() {
+ // The type mismatch is because we don't define Unsize and CoerceUnsized
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, deref, option
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+ inner: *mut T,
+}
+
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.inner
+ }
+}
+
+fn foo() {
+ let s = None;
+ let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {});
+ f(&s);
+}"#,
+ expect![[r#"
+ 154..158 'self': &Box<T>
+ 166..193 '{ ... }': &T
+ 176..187 '&self.inner': &*mut T
+ 177..181 'self': &Box<T>
+ 177..187 'self.inner': *mut T
+ 206..296 '{ ...&s); }': ()
+ 216..217 's': Option<i32>
+ 220..224 'None': Option<i32>
+ 234..235 'f': Box<dyn FnOnce(&Option<i32>)>
+ 269..282 'box (|ps| {})': Box<|&Option<i32>| -> ()>
+ 274..281 '|ps| {}': |&Option<i32>| -> ()
+ 275..277 'ps': &Option<i32>
+ 279..281 '{}': ()
+ 288..289 'f': Box<dyn FnOnce(&Option<i32>)>
+ 288..293 'f(&s)': ()
+ 290..292 '&s': &Option<i32>
+ 291..292 's': Option<i32>
+ 269..282: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|&Option<i32>| -> ()>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_dyn_fn_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: &dyn Fn() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn infer_dyn_fn_once_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: dyn FnOnce() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn variable_kinds_1() {
+ check_types(
+ r#"
+trait Trait<T> { fn get(self, t: T) -> T; }
+struct S;
+impl Trait<u128> for S {}
+impl Trait<f32> for S {}
+fn test() {
+ S.get(1);
+ //^^^^^^^^ u128
+ S.get(1.);
+ //^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn variable_kinds_2() {
+ check_types(
+ r#"
+trait Trait { fn get(self) -> Self; }
+impl Trait for u128 {}
+impl Trait for f32 {}
+fn test() {
+ 1.get();
+ //^^^^^^^ u128
+ (1.).get();
+ //^^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check_types(
+ r#"
+mod tr {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+}
+
+struct Tr;
+impl crate::tr::Tr for Tr {}
+
+use crate::tr::Tr as _;
+fn test() {
+ Tr.method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn inner_use() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_in_scope_with_inner_item() {
+ check_infer(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+use m::Tr;
+
+fn f() {
+ fn inner() {
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+}"#,
+ expect![[r#"
+ 46..50 'self': &Self
+ 58..63 '{ 0 }': u8
+ 60..61 '0': u8
+ 115..185 '{ ... } }': ()
+ 132..183 '{ ... }': ()
+ 142..144 '()': ()
+ 142..153 '().method()': u8
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_in_block() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+
+ {
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+ }
+
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+}
+ "#,
+ );
+}
+
+#[test]
+fn nested_inner_function_calling_self() {
+ check_infer(
+ r#"
+struct S;
+fn f() {
+ fn inner() -> S {
+ let s = inner();
+ }
+}"#,
+ expect![[r#"
+ 17..73 '{ ... } }': ()
+ 39..71 '{ ... }': S
+ 53..54 's': S
+ 57..62 'inner': fn inner() -> S
+ 57..64 'inner()': S
+ "#]],
+ )
+}
+
+#[test]
+fn infer_default_trait_type_parameter() {
+ check_infer(
+ r#"
+struct A;
+
+trait Op<RHS=Self> {
+ type Output;
+
+ fn do_op(self, rhs: RHS) -> Self::Output;
+}
+
+impl Op for A {
+ type Output = bool;
+
+ fn do_op(self, rhs: Self) -> Self::Output {
+ true
+ }
+}
+
+fn test() {
+ let x = A;
+ let y = A;
+ let r = x.do_op(y);
+}"#,
+ expect![[r#"
+ 63..67 'self': Self
+ 69..72 'rhs': RHS
+ 153..157 'self': A
+ 159..162 'rhs': A
+ 186..206 '{ ... }': bool
+ 196..200 'true': bool
+ 220..277 '{ ...(y); }': ()
+ 230..231 'x': A
+ 234..235 'A': A
+ 245..246 'y': A
+ 249..250 'A': A
+ 260..261 'r': bool
+ 264..265 'x': A
+ 264..274 'x.do_op(y)': bool
+ 272..273 'y': A
+ "#]],
+ )
+}
+
+#[test]
+fn qualified_path_as_qualified_trait() {
+ check_infer(
+ r#"
+mod foo {
+
+ pub trait Foo {
+ type Target;
+ }
+ pub trait Bar {
+ type Output;
+ fn boo() -> Self::Output {
+ loop {}
+ }
+ }
+}
+
+struct F;
+impl foo::Foo for F {
+ type Target = ();
+}
+impl foo::Bar for F {
+ type Output = <F as foo::Foo>::Target;
+}
+
+fn foo() {
+ use foo::Bar;
+ let x = <F as Bar>::boo();
+}"#,
+ expect![[r#"
+ 132..163 '{ ... }': Bar::Output<Self>
+ 146..153 'loop {}': !
+ 151..153 '{}': ()
+ 306..358 '{ ...o(); }': ()
+ 334..335 'x': ()
+ 338..353 '<F as Bar>::boo': fn boo<F>() -> <F as Bar>::Output
+ 338..355 '<F as ...:boo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn renamed_extern_crate_in_block() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:serde
+use serde::Deserialize;
+
+struct Foo {}
+
+const _ : () = {
+ extern crate serde as _serde;
+ impl _serde::Deserialize for Foo {
+ fn deserialize() -> u8 { 0 }
+ }
+};
+
+fn foo() {
+ Foo::deserialize();
+ //^^^^^^^^^^^^^^^^^^ u8
+}
+
+//- /serde.rs crate:serde
+
+pub trait Deserialize {
+ fn deserialize() -> u8;
+}"#,
+ );
+}
+
+#[test]
+fn bin_op_with_rhs_is_self_for_assoc_bound() {
+ check_no_mismatches(
+ r#"//- minicore: eq
+ fn repro<T>(t: T) -> bool
+where
+ T: Request,
+ T::Output: Convertable,
+{
+ let a = execute(&t).convert();
+ let b = execute(&t).convert();
+ a.eq(&b);
+ let a = execute(&t).convert2();
+ let b = execute(&t).convert2();
+ a.eq(&b)
+}
+fn execute<T>(t: &T) -> T::Output
+where
+ T: Request,
+{
+ <T as Request>::output()
+}
+trait Convertable {
+ type TraitSelf: PartialEq<Self::TraitSelf>;
+ type AssocAsDefaultSelf: PartialEq;
+ fn convert(self) -> Self::AssocAsDefaultSelf;
+ fn convert2(self) -> Self::TraitSelf;
+}
+trait Request {
+ type Output;
+ fn output() -> Self::Output;
+}
+ "#,
+ );
+}
+
+#[test]
+fn bin_op_adt_with_rhs_primitive() {
+ check_infer_with_mismatches(
+ r#"
+#[lang = "add"]
+pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+}
+
+struct Wrapper(u32);
+impl Add<u32> for Wrapper {
+ type Output = Self;
+ fn add(self, rhs: u32) -> Wrapper {
+ Wrapper(rhs)
+ }
+}
+fn main(){
+ let wrapped = Wrapper(10);
+ let num: u32 = 2;
+ let res = wrapped + num;
+
+}"#,
+ expect![[r#"
+ 72..76 'self': Self
+ 78..81 'rhs': Rhs
+ 192..196 'self': Wrapper
+ 198..201 'rhs': u32
+ 219..247 '{ ... }': Wrapper
+ 229..236 'Wrapper': Wrapper(u32) -> Wrapper
+ 229..241 'Wrapper(rhs)': Wrapper
+ 237..240 'rhs': u32
+ 259..345 '{ ...um; }': ()
+ 269..276 'wrapped': Wrapper
+ 279..286 'Wrapper': Wrapper(u32) -> Wrapper
+ 279..290 'Wrapper(10)': Wrapper
+ 287..289 '10': u32
+ 300..303 'num': u32
+ 311..312 '2': u32
+ 322..325 'res': Wrapper
+ 328..335 'wrapped': Wrapper
+ 328..341 'wrapped + num': Wrapper
+ 338..341 'num': u32
+ "#]],
+ )
+}
+
+#[test]
+fn array_length() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl T for [u8; 4] {
+ type Output = usize;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+impl T for [u8; 2] {
+ type Output = u8;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+ let v3 = [0u8; 4];
+ let v4 = v3.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 133..137 'self': &[u8; 4]
+ 155..172 '{ ... }': usize
+ 165..166 '2': usize
+ 236..240 'self': &[u8; 2]
+ 258..275 '{ ... }': u8
+ 268..269 '2': u8
+ 289..392 '{ ...g(); }': ()
+ 299..300 'v': [u8; 2]
+ 303..311 '[0u8; 2]': [u8; 2]
+ 304..307 '0u8': u8
+ 309..310 '2': usize
+ 321..323 'v2': u8
+ 326..327 'v': [u8; 2]
+ 326..338 'v.do_thing()': u8
+ 348..350 'v3': [u8; 4]
+ 353..361 '[0u8; 4]': [u8; 4]
+ 354..357 '0u8': u8
+ 359..360 '4': usize
+ 371..373 'v4': usize
+ 376..378 'v3': [u8; 4]
+ 376..389 'v3.do_thing()': usize
+ "#]],
+ )
+}
+
+#[test]
+fn const_generics() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl<const L: usize> T for [u8; L] {
+ type Output = [u8; L];
+ fn do_thing(&self) -> Self::Output {
+ *self
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 151..155 'self': &[u8; L]
+ 173..194 '{ ... }': [u8; L]
+ 183..188 '*self': [u8; L]
+ 184..188 'self': &[u8; L]
+ 208..260 '{ ...g(); }': ()
+ 218..219 'v': [u8; 2]
+ 222..230 '[0u8; 2]': [u8; 2]
+ 223..226 '0u8': u8
+ 228..229 '2': usize
+ 240..242 'v2': [u8; 2]
+ 245..246 'v': [u8; 2]
+ 245..257 'v.do_thing()': [u8; 2]
+ "#]],
+ )
+}
+
+#[test]
+fn fn_returning_unit() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn test<F: FnOnce()>(f: F) {
+ let _: () = f();
+}"#,
+ expect![[r#"
+ 21..22 'f': F
+ 27..51 '{ ...f(); }': ()
+ 37..38 '_': ()
+ 45..46 'f': F
+ 45..48 'f()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn trait_in_scope_of_trait_impl() {
+ check_infer(
+ r#"
+mod foo {
+ pub trait Foo {
+ fn foo(self);
+ fn bar(self) -> usize { 0 }
+ }
+}
+impl foo::Foo for u32 {
+ fn foo(self) {
+ let _x = self.bar();
+ }
+}
+ "#,
+ expect![[r#"
+ 45..49 'self': Self
+ 67..71 'self': Self
+ 82..87 '{ 0 }': usize
+ 84..85 '0': usize
+ 131..135 'self': u32
+ 137..173 '{ ... }': ()
+ 151..153 '_x': usize
+ 156..160 'self': u32
+ 156..166 'self.bar()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_async_ret_type() {
+ check_types(
+ r#"
+//- minicore: future, result
+struct Fooey;
+
+impl Fooey {
+ fn collect<B: Convert>(self) -> B {
+ B::new()
+ }
+}
+
+trait Convert {
+ fn new() -> Self;
+}
+impl Convert for u32 {
+ fn new() -> Self { 0 }
+}
+
+async fn get_accounts() -> Result<u32, ()> {
+ let ret = Fooey.collect();
+ // ^^^^^^^^^^^^^^^ u32
+ Ok(ret)
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_1() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S;
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_2() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+struct S;
+
+fn test() {
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ }
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_3() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S1;
+ {
+ struct S2;
+
+ impl Trait<S1> for S2 {
+ fn foo(&self) -> S1 { S1 }
+ }
+
+ S2.foo();
+ // ^^^^^^^^ S1
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn associated_type_sized_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+struct Yes;
+trait IsSized { const IS_SIZED: Yes; }
+impl<T: Sized> IsSized for T { const IS_SIZED: Yes = Yes; }
+
+trait Foo {
+ type Explicit: Sized;
+ type Implicit;
+ type Relaxed: ?Sized;
+}
+fn f<F: Foo>() {
+ F::Explicit::IS_SIZED;
+ F::Implicit::IS_SIZED;
+ F::Relaxed::IS_SIZED;
+}
+"#,
+ expect![[r#"
+ 104..107 'Yes': Yes
+ 212..295 '{ ...ZED; }': ()
+ 218..239 'F::Exp..._SIZED': Yes
+ 245..266 'F::Imp..._SIZED': Yes
+ 272..292 'F::Rel..._SIZED': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_map() {
+ check_types(
+ r#"
+pub struct Key<K, V, P = (K, V)> {}
+
+pub trait Policy {
+ type K;
+ type V;
+}
+
+impl<K, V> Policy for (K, V) {
+ type K = K;
+ type V = V;
+}
+
+pub struct KeyMap<KEY> {}
+
+impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
+ pub fn get(&self, key: &P::K) -> P::V {
+ loop {}
+ }
+}
+
+struct Fn {}
+struct FunctionId {}
+
+fn test() {
+ let key_map: &KeyMap<Key<Fn, FunctionId>> = loop {};
+ let key;
+ let result = key_map.get(key);
+ //^^^^^^ FunctionId
+}
+"#,
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
new file mode 100644
index 000000000..547850b02
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs
@@ -0,0 +1,133 @@
+//! Implementation of Chalk debug helper functions using TLS.
+use std::fmt::{self, Display};
+
+use itertools::Itertools;
+
+use crate::{
+ chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
+ CallableDefId, Interner,
+};
+use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId};
+
+pub(crate) use unsafe_tls::{set_current_program, with_current_program};
+
+pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase);
+
+impl DebugContext<'_> {
+ pub(crate) fn debug_struct_id(
+ &self,
+ id: chalk_db::AdtId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let name = match id.0 {
+ AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
+ AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
+ AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
+ };
+ name.fmt(f)
+ }
+
+ pub(crate) fn debug_trait_id(
+ &self,
+ id: chalk_db::TraitId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let trait_: hir_def::TraitId = from_chalk_trait_id(id);
+ let trait_data = self.0.trait_data(trait_);
+ trait_data.name.fmt(f)
+ }
+
+ pub(crate) fn debug_assoc_type_id(
+ &self,
+ id: chalk_db::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias: TypeAliasId = from_assoc_type_id(id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
+ }
+
+ pub(crate) fn debug_projection_ty(
+ &self,
+ projection_ty: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ ItemContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ let params = projection_ty.substitution.as_slice(Interner);
+ write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
+ if params.len() > 1 {
+ write!(
+ fmt,
+ "<{}>",
+ &params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+ )?;
+ }
+ write!(fmt, ">::{}", type_alias_data.name)
+ }
+
+ pub(crate) fn debug_fn_def_id(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let def: CallableDefId = from_chalk(self.0, fn_def_id);
+ let name = match def {
+ CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
+ CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
+ CallableDefId::EnumVariantId(e) => {
+ let enum_data = self.0.enum_data(e.parent);
+ enum_data.variants[e.local_id].name.clone()
+ }
+ };
+ match def {
+ CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
+ write!(fmt, "{{ctor {}}}", name)
+ }
+ }
+ }
+}
+
+mod unsafe_tls {
+ use super::DebugContext;
+ use crate::db::HirDatabase;
+ use scoped_tls::scoped_thread_local;
+
+ scoped_thread_local!(static PROGRAM: DebugContext<'_>);
+
+ pub(crate) fn with_current_program<R>(
+ op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
+ ) -> R {
+ if PROGRAM.is_set() {
+ PROGRAM.with(|prog| op(Some(prog)))
+ } else {
+ op(None)
+ }
+ }
+
+ pub(crate) fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
+ where
+ OP: FnOnce() -> R,
+ {
+ let ctx = DebugContext(p);
+        // SAFETY: we're transmuting the lifetime in the DebugContext to 'static.
+        // This is fine because we only keep the reference for the lifetime of
+        // this function, *and* the only way to access the context is through
+        // `with_current_program`, which hides the lifetime behind the `for<'a>`
+        // (higher-ranked) closure type, so the reference can never escape.
+ let static_p: &DebugContext<'static> =
+ unsafe { std::mem::transmute::<&DebugContext<'_>, &DebugContext<'static>>(&ctx) };
+ PROGRAM.set(static_p, op)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
new file mode 100644
index 000000000..77afeb321
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -0,0 +1,187 @@
+//! Trait solving using Chalk.
+
+use std::env::var;
+
+use chalk_ir::GoalData;
+use chalk_recursive::Cache;
+use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver};
+
+use base_db::CrateId;
+use hir_def::{lang_item::LangItemTarget, TraitId};
+use stdx::panic_context;
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment,
+ Interner, Solution, TraitRefExt, Ty, TyKind, WhereClause,
+};
+
+/// This controls how much 'time' we give the Chalk solver before giving up.
+const CHALK_SOLVER_FUEL: i32 = 100;
+
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct ChalkContext<'a> {
+ pub(crate) db: &'a dyn HirDatabase,
+ pub(crate) krate: CrateId,
+}
+
+fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
+ let overflow_depth =
+ var("CHALK_OVERFLOW_DEPTH").ok().and_then(|s| s.parse().ok()).unwrap_or(500);
+ let max_size = var("CHALK_SOLVER_MAX_SIZE").ok().and_then(|s| s.parse().ok()).unwrap_or(150);
+ chalk_recursive::RecursiveSolver::new(overflow_depth, max_size, Some(Cache::new()))
+}
+
+/// A set of clauses that we assume to be true. E.g. if we are inside this function:
+/// ```rust
+/// fn foo<T: Default>(t: T) {}
+/// ```
+/// we assume that `T: Default`.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TraitEnvironment {
+ pub krate: CrateId,
+ // FIXME make this a BTreeMap
+ pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>,
+ pub env: chalk_ir::Environment<Interner>,
+}
+
+impl TraitEnvironment {
+ pub fn empty(krate: CrateId) -> Self {
+ TraitEnvironment {
+ krate,
+ traits_from_clauses: Vec::new(),
+ env: chalk_ir::Environment::new(Interner),
+ }
+ }
+
+ pub fn traits_in_scope_from_clauses<'a>(
+ &'a self,
+ ty: Ty,
+ ) -> impl Iterator<Item = TraitId> + 'a {
+ self.traits_from_clauses
+ .iter()
+ .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id))
+ }
+}
+
+/// Solve a trait goal using Chalk.
+pub(crate) fn trait_solve_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: Canonical<InEnvironment<Goal>>,
+) -> Option<Solution> {
+ let _p = profile::span("trait_solve_query").detail(|| match &goal.value.goal.data(Interner) {
+ GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
+ db.trait_data(it.hir_trait_id()).name.to_string()
+ }
+ GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
+ _ => "??".to_string(),
+ });
+ tracing::info!("trait_solve_query({:?})", goal.value.goal);
+
+ if let GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(AliasEq {
+ alias: AliasTy::Projection(projection_ty),
+ ..
+ }))) = &goal.value.goal.data(Interner)
+ {
+ if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(Interner).kind(Interner) {
+ // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
+ return Some(Solution::Ambig(Guidance::Unknown));
+ }
+ }
+
+ // We currently don't deal with universes (I think / hope they're not yet
+ // relevant for our use cases?)
+ let u_canonical = chalk_ir::UCanonical { canonical: goal, universes: 1 };
+ solve(db, krate, &u_canonical)
+}
+
+fn solve(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
+) -> Option<chalk_solve::Solution<Interner>> {
+ let context = ChalkContext { db, krate };
+ tracing::debug!("solve goal: {:?}", goal);
+ let mut solver = create_chalk_solver();
+
+ let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
+
+ let should_continue = || {
+ db.unwind_if_cancelled();
+ let remaining = fuel.get();
+ fuel.set(remaining - 1);
+ if remaining == 0 {
+ tracing::debug!("fuel exhausted");
+ }
+ remaining > 0
+ };
+
+ let mut solve = || {
+ let _ctx = if is_chalk_debug() || is_chalk_print() {
+ Some(panic_context::enter(format!("solving {:?}", goal)))
+ } else {
+ None
+ };
+ let solution = if is_chalk_print() {
+ let logging_db =
+ LoggingRustIrDatabaseLoggingOnDrop(LoggingRustIrDatabase::new(context));
+ solver.solve_limited(&logging_db.0, goal, &should_continue)
+ } else {
+ solver.solve_limited(&context, goal, &should_continue)
+ };
+
+ tracing::debug!("solve({:?}) => {:?}", goal, solution);
+
+ solution
+ };
+
+ // don't set the TLS for Chalk unless Chalk debugging is active, to make
+ // extra sure we only use it for debugging
+ if is_chalk_debug() {
+ crate::tls::set_current_program(db, solve)
+ } else {
+ solve()
+ }
+}
+
+struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);
+
+impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> {
+ fn drop(&mut self) {
+ eprintln!("chalk program:\n{}", self.0);
+ }
+}
+
+fn is_chalk_debug() -> bool {
+ std::env::var("CHALK_DEBUG").is_ok()
+}
+
+fn is_chalk_print() -> bool {
+ std::env::var("CHALK_PRINT").is_ok()
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum FnTrait {
+ FnOnce,
+ FnMut,
+ Fn,
+}
+
+impl FnTrait {
+ const fn lang_item_name(self) -> &'static str {
+ match self {
+ FnTrait::FnOnce => "fn_once",
+ FnTrait::FnMut => "fn_mut",
+ FnTrait::Fn => "fn",
+ }
+ }
+
+ pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
+ let target = db.lang_item(krate, SmolStr::new_inline(self.lang_item_name()))?;
+ match target {
+ LangItemTarget::TraitId(t) => Some(t),
+ _ => None,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
new file mode 100644
index 000000000..83319755d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -0,0 +1,408 @@
+//! Helper functions for working with defs, which don't need to be a separate
+//! query, but can't be computed directly from `*Data` (i.e., which need a `db`).
+
+use std::iter;
+
+use base_db::CrateId;
+use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
+use hir_def::{
+ db::DefDatabase,
+ generics::{
+ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
+ WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ resolver::{HasResolver, TypeNs},
+ type_ref::{TraitBoundModifier, TypeRef},
+ ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId,
+ TypeOrConstParamId, TypeParamId,
+};
+use hir_expand::name::{known, Name};
+use itertools::Either;
+use rustc_hash::FxHashSet;
+use smallvec::{smallvec, SmallVec};
+use syntax::SmolStr;
+
+use crate::{
+ db::HirDatabase, ChalkTraitId, ConstData, ConstValue, GenericArgData, Interner, Substitution,
+ TraitRef, TraitRefExt, TyKind, WhereClause,
+};
+
+pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
+ [
+ db.lang_item(krate, SmolStr::new_inline("fn")),
+ db.lang_item(krate, SmolStr::new_inline("fn_mut")),
+ db.lang_item(krate, SmolStr::new_inline("fn_once")),
+ ]
+ .into_iter()
+ .flatten()
+ .flat_map(|it| it.as_trait())
+}
+
+fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+ let resolver = trait_.resolver(db);
+    // Returning the iterator directly doesn't easily work because of
+    // lifetime problems; since there usually shouldn't be more than a
+    // few direct super traits, collecting them into the inline-capacity
+    // `SmallVec` returned here should be fine.
+ let generic_params = db.generic_params(trait_.into());
+ let trait_self = generic_params.find_trait_self_param();
+ generic_params
+ .where_predicates
+ .iter()
+ .filter_map(|pred| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound } => {
+ let is_trait = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => match &**type_ref {
+ TypeRef::Path(p) => p.is_self_type(),
+ _ => false,
+ },
+ WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ Some(*local_id) == trait_self
+ }
+ };
+ match is_trait {
+ true => bound.as_path(),
+ false => None,
+ }
+ }
+ WherePredicate::Lifetime { .. } => None,
+ })
+ .filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None))
+ .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
+ Some(TypeNs::TraitId(t)) => Some(t),
+ _ => None,
+ })
+ .collect()
+}
+
+fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
+    // Returning the iterator directly doesn't easily work because of
+    // lifetime problems; since there usually shouldn't be more than a
+    // few direct super traits, collecting the implied trait refs into a
+    // `Vec` here should be fine.
+ let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
+ let trait_self = match generic_params.find_trait_self_param() {
+ Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
+ None => return Vec::new(),
+ };
+ db.generic_predicates_for_param(trait_self.parent, trait_self, None)
+ .iter()
+ .filter_map(|pred| {
+ pred.as_ref().filter_map(|pred| match pred.skip_binders() {
+ // FIXME: how to correctly handle higher-ranked bounds here?
+ WhereClause::Implemented(tr) => Some(
+ tr.clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("FIXME unexpected higher-ranked trait bound"),
+ ),
+ _ => None,
+ })
+ })
+ .map(|pred| pred.substitute(Interner, &trait_ref.substitution))
+ .collect()
+}
+
+/// Returns an iterator over the whole super trait hierarchy (including the
+/// trait itself).
+pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+ // we need to take care a bit here to avoid infinite loops in case of cycles
+ // (i.e. if we have `trait A: B; trait B: A;`)
+
+ let mut result = smallvec![trait_];
+ let mut i = 0;
+ while let Some(&t) = result.get(i) {
+ // yeah this is quadratic, but trait hierarchies should be flat
+ // enough that this doesn't matter
+ for tt in direct_super_traits(db, t) {
+ if !result.contains(&tt) {
+ result.push(tt);
+ }
+ }
+ i += 1;
+ }
+ result
+}
+
+/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
+/// super traits. The original trait ref will be included. So the difference to
+/// `all_super_traits` is that we keep track of type parameters; for example if
+/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
+/// `Self: OtherTrait<i32>`.
+pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits<'_> {
+ SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] }
+}
+
+pub(super) struct SuperTraits<'a> {
+ db: &'a dyn HirDatabase,
+ stack: Vec<TraitRef>,
+ seen: FxHashSet<ChalkTraitId>,
+}
+
+impl<'a> SuperTraits<'a> {
+ fn elaborate(&mut self, trait_ref: &TraitRef) {
+ let mut trait_refs = direct_super_trait_refs(self.db, trait_ref);
+ trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id));
+ self.stack.extend(trait_refs);
+ }
+}
+
+impl<'a> Iterator for SuperTraits<'a> {
+ type Item = TraitRef;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(next) = self.stack.pop() {
+ self.elaborate(&next);
+ Some(next)
+ } else {
+ None
+ }
+ }
+}
+
+pub(super) fn associated_type_by_name_including_super_traits(
+ db: &dyn HirDatabase,
+ trait_ref: TraitRef,
+ name: &Name,
+) -> Option<(TraitRef, TypeAliasId)> {
+ all_super_trait_refs(db, trait_ref).find_map(|t| {
+ let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
+ Some((t, assoc_type))
+ })
+}
+
+pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
+ let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
+ if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
+ let params = db.generic_params(def);
+ let has_consts =
+ params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
+ return if has_consts {
+ // XXX: treat const generic associated types as not existing to avoid crashes (#11769)
+ //
+ // Chalk expects the inner associated type's parameters to come
+ // *before*, not after the trait's generics as we've always done it.
+ // Adapting to this requires a larger refactoring
+ cov_mark::hit!(ignore_gats);
+ Generics { def, params: Interned::new(Default::default()), parent_generics }
+ } else {
+ Generics { def, params, parent_generics }
+ };
+ }
+ Generics { def, params: db.generic_params(def), parent_generics }
+}
+
+#[derive(Debug)]
+pub(crate) struct Generics {
+ def: GenericDefId,
+ pub(crate) params: Interned<GenericParams>,
+ parent_generics: Option<Box<Generics>>,
+}
+
+impl Generics {
+ pub(crate) fn iter_id<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + 'a {
+ self.iter().map(|(id, data)| match data {
+ TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)),
+ TypeOrConstParamData::ConstParamData(_) => {
+ Either::Right(ConstParamId::from_unchecked(id))
+ }
+ })
+ }
+
+ /// Iterator over types and const params of parent, then self.
+ pub(crate) fn iter<'a>(
+ &'a self,
+ ) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+ let to_toc_id = |it: &'a Generics| {
+ move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p)
+ };
+ self.parent_generics()
+ .into_iter()
+ .flat_map(move |it| it.params.iter().map(to_toc_id(it)))
+ .chain(self.params.iter().map(to_toc_id(self)))
+ }
+
+ /// Iterator over types and const params of parent.
+ pub(crate) fn iter_parent<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
+ self.parent_generics().into_iter().flat_map(|it| {
+ let to_toc_id =
+ move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);
+ it.params.iter().map(to_toc_id)
+ })
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ let parent = self.parent_generics().map_or(0, Generics::len);
+ let child = self.params.type_or_consts.len();
+ parent + child
+ }
+
+ /// (parent total, self param, type param list, const param list, impl trait)
+ pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) {
+ let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param());
+
+ let self_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::TraitSelf).count();
+ let type_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::TypeParamList).count();
+ let impl_trait_params =
+ ty_iter().filter(|p| p.provenance == TypeParamProvenance::ArgumentImplTrait).count();
+ let const_params = self.params.iter().filter_map(|x| x.1.const_param()).count();
+
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ (parent_len, self_params, type_params, const_params, impl_trait_params)
+ }
+
+ pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
+ Some(self.find_param(param)?.0)
+ }
+
+ fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
+ if param.parent == self.def {
+ let (idx, (_local_id, data)) = self
+ .params
+ .iter()
+ .enumerate()
+ .find(|(_, (idx, _))| *idx == param.local_id)
+ .unwrap();
+ let parent_len = self.parent_generics().map_or(0, Generics::len);
+ Some((parent_len + idx, data))
+ } else {
+ self.parent_generics().and_then(|g| g.find_param(param))
+ }
+ }
+
+ fn parent_generics(&self) -> Option<&Generics> {
+ self.parent_generics.as_ref().map(|it| &**it)
+ }
+
+ /// Returns a Substitution that replaces each parameter by a bound variable.
+ pub(crate) fn bound_vars_subst(
+ &self,
+ db: &dyn HirDatabase,
+ debruijn: DebruijnIndex,
+ ) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().enumerate().map(|(idx, id)| match id {
+ Either::Left(_) => GenericArgData::Ty(
+ TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner),
+ )
+ .intern(Interner),
+ Either::Right(id) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
+ ty: db.const_param_ty(id),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ }),
+ )
+ }
+
+ /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
+ pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ self.iter_id().map(|id| match id {
+ Either::Left(id) => GenericArgData::Ty(
+ TyKind::Placeholder(crate::to_placeholder_idx(db, id.into())).intern(Interner),
+ )
+ .intern(Interner),
+ Either::Right(id) => GenericArgData::Const(
+ ConstData {
+ value: ConstValue::Placeholder(crate::to_placeholder_idx(db, id.into())),
+ ty: db.const_param_ty(id),
+ }
+ .intern(Interner),
+ )
+ .intern(Interner),
+ }),
+ )
+ }
+}
+
+fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
+ let container = match def {
+ GenericDefId::FunctionId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
+ GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
+ };
+
+ match container {
+ ItemContainerId::ImplId(it) => Some(it.into()),
+ ItemContainerId::TraitId(it) => Some(it.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+}
+
+pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
+ let data = db.function_data(func);
+ if data.has_unsafe_kw() {
+ return true;
+ }
+
+ match func.lookup(db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+            // Functions in an `extern` block are always unsafe to call, except
+            // when the block has the `"rust-intrinsic"` ABI, where there are a few exceptions.
+ let id = block.lookup(db.upcast()).id;
+ !matches!(
+ id.item_tree(db.upcast())[id.value].abi.as_deref(),
+ Some("rust-intrinsic") if !is_intrinsic_fn_unsafe(&data.name)
+ )
+ }
+ _ => false,
+ }
+}
+
+/// Returns `true` if the given intrinsic is unsafe to call, or `false` otherwise.
+fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
+ // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106
+ ![
+ known::abort,
+ known::add_with_overflow,
+ known::bitreverse,
+ known::black_box,
+ known::bswap,
+ known::caller_location,
+ known::ctlz,
+ known::ctpop,
+ known::cttz,
+ known::discriminant_value,
+ known::forget,
+ known::likely,
+ known::maxnumf32,
+ known::maxnumf64,
+ known::min_align_of,
+ known::minnumf32,
+ known::minnumf64,
+ known::mul_with_overflow,
+ known::needs_drop,
+ known::ptr_guaranteed_eq,
+ known::ptr_guaranteed_ne,
+ known::rotate_left,
+ known::rotate_right,
+ known::rustc_peek,
+ known::saturating_add,
+ known::saturating_sub,
+ known::size_of,
+ known::sub_with_overflow,
+ known::type_id,
+ known::type_name,
+ known::unlikely,
+ known::variant_count,
+ known::wrapping_add,
+ known::wrapping_mul,
+ known::wrapping_sub,
+ ]
+ .contains(name)
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
new file mode 100644
index 000000000..c47689455
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
@@ -0,0 +1,147 @@
+//! The `TypeWalk` trait (probably to be replaced by Chalk's `Fold` and
+//! `Visit`).
+
+use chalk_ir::interner::HasInterner;
+
+use crate::{
+ AliasEq, AliasTy, Binders, CallableSig, FnSubst, GenericArg, GenericArgData, Interner,
+ OpaqueTy, ProjectionTy, Substitution, TraitRef, Ty, TyKind, WhereClause,
+};
+
+/// This allows walking structures that contain types to do something with those
+/// types, similar to Chalk's `Fold` trait.
+pub trait TypeWalk {
+    /// Calls `f` on every `Ty` reachable from `self` (see the individual
+    /// impls for the exact traversal).
+    fn walk(&self, f: &mut impl FnMut(&Ty));
+}
+
+impl TypeWalk for Ty {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Recurse into whatever types this kind of type contains first...
+        match self.kind(Interner) {
+            TyKind::Alias(AliasTy::Projection(p_ty)) => {
+                for t in p_ty.substitution.iter(Interner) {
+                    t.walk(f);
+                }
+            }
+            TyKind::Alias(AliasTy::Opaque(o_ty)) => {
+                for t in o_ty.substitution.iter(Interner) {
+                    t.walk(f);
+                }
+            }
+            TyKind::Dyn(dyn_ty) => {
+                // Walk each bound of the trait object (binders are skipped,
+                // see the `Binders<T>` impl).
+                for p in dyn_ty.bounds.skip_binders().interned().iter() {
+                    p.walk(f);
+                }
+            }
+            // Types wrapping exactly one inner type.
+            TyKind::Slice(ty)
+            | TyKind::Array(ty, _)
+            | TyKind::Ref(_, _, ty)
+            | TyKind::Raw(_, ty) => {
+                ty.walk(f);
+            }
+            TyKind::Function(fn_pointer) => {
+                fn_pointer.substitution.0.walk(f);
+            }
+            // Types carrying a substitution: walk every argument in it.
+            TyKind::Adt(_, substs)
+            | TyKind::FnDef(_, substs)
+            | TyKind::Tuple(_, substs)
+            | TyKind::OpaqueType(_, substs)
+            | TyKind::AssociatedType(_, substs)
+            | TyKind::Closure(.., substs) => {
+                substs.walk(f);
+            }
+            // Leaf kinds (scalars, placeholders, errors, ...) have nothing to recurse into.
+            _ => {}
+        }
+        // ...then visit the type itself: a post-order traversal.
+        f(self);
+    }
+}
+
+impl<T: TypeWalk> TypeWalk for Vec<T> {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Walk each element in order.
+        for t in self {
+            t.walk(f);
+        }
+    }
+}
+
+impl TypeWalk for OpaqueTy {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // An opaque type's contained types all live in its substitution.
+        self.substitution.walk(f);
+    }
+}
+
+impl TypeWalk for ProjectionTy {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // A projection's contained types all live in its substitution.
+        self.substitution.walk(f);
+    }
+}
+
+impl TypeWalk for AliasTy {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Dispatch to the concrete alias kind.
+        match self {
+            AliasTy::Projection(it) => it.walk(f),
+            AliasTy::Opaque(it) => it.walk(f),
+        }
+    }
+}
+
+impl TypeWalk for GenericArg {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Only type arguments are walked; other generic-arg kinds
+        // (e.g. lifetimes/consts) are skipped.
+        if let GenericArgData::Ty(ty) = &self.interned() {
+            ty.walk(f);
+        }
+    }
+}
+
+impl TypeWalk for Substitution {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Walk each generic argument in the substitution in order.
+        for t in self.iter(Interner) {
+            t.walk(f);
+        }
+    }
+}
+
+impl<T: TypeWalk + HasInterner<Interner = Interner>> TypeWalk for Binders<T> {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Walks the bound value as-is; `skip_binders` does not instantiate or
+        // shift the bound variables, so `f` may see bound-variable types.
+        self.skip_binders().walk(f);
+    }
+}
+
+impl TypeWalk for TraitRef {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // A trait reference's contained types (self type + trait arguments)
+        // all live in its substitution.
+        self.substitution.walk(f);
+    }
+}
+
+impl TypeWalk for WhereClause {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        match self {
+            WhereClause::Implemented(trait_ref) => trait_ref.walk(f),
+            WhereClause::AliasEq(alias_eq) => alias_eq.walk(f),
+            // Other clause kinds are deliberately not walked.
+            _ => {}
+        }
+    }
+}
+
+impl TypeWalk for CallableSig {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Walk every type in the signature (parameters and return type,
+        // stored together in `params_and_return`).
+        for t in self.params_and_return.iter() {
+            t.walk(f);
+        }
+    }
+}
+
+impl TypeWalk for AliasEq {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // Walk the equated type first, then the alias side.
+        self.ty.walk(f);
+        match &self.alias {
+            AliasTy::Projection(projection_ty) => projection_ty.walk(f),
+            AliasTy::Opaque(opaque) => opaque.walk(f),
+        }
+    }
+}
+
+impl TypeWalk for FnSubst<Interner> {
+    fn walk(&self, f: &mut impl FnMut(&Ty)) {
+        // `FnSubst` is a newtype over a `Substitution`; delegate to it.
+        self.0.walk(f)
+    }
+}