author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit     698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree       173a775858bd501c378080a10dca74132f05bc50 /src/tools/rust-analyzer/crates/hir
parent     Initial commit. (diff)
Adding upstream version 1.64.0+dfsg1. (upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir')
-rw-r--r--  src/tools/rust-analyzer/crates/hir/Cargo.toml                     |   28
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/attrs.rs                   |  177
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/db.rs                      |   16
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/diagnostics.rs             |  170
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/display.rs                 |  530
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/from_id.rs                 |  293
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/has_source.rs              |  174
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/lib.rs                     | 3639
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/semantics.rs               | 1540
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs |  473
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs         |  915
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/symbols.rs                 |  348
12 files changed, 8303 insertions(+), 0 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
new file mode 100644
index 000000000..8e6a2441b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "hir"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+either = "1.7.0"
+arrayvec = "0.7.2"
+itertools = "0.10.3"
+smallvec = "1.9.0"
+once_cell = "1.12.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
new file mode 100644
index 000000000..0bd379340
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -0,0 +1,177 @@
+//! Attributes & documentation for hir types.
+
+use hir_def::{
+ attr::{AttrsWithOwner, Documentation},
+ item_scope::ItemInNs,
+ path::ModPath,
+ per_ns::PerNs,
+ resolver::HasResolver,
+ AttrDefId, GenericParamId, ModuleDefId,
+};
+use hir_expand::hygiene::Hygiene;
+use hir_ty::db::HirDatabase;
+use syntax::{ast, AstNode};
+
+use crate::{
+ Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
+ Macro, Module, ModuleDef, Static, Struct, Trait, TypeAlias, TypeParam, Union, Variant,
+};
+
+pub trait HasAttrs {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef>;
+}
+
+#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
+pub enum Namespace {
+ Types,
+ Values,
+ Macros,
+}
+
+macro_rules! impl_has_attrs {
+ ($(($def:ident, $def_id:ident),)*) => {$(
+ impl HasAttrs for $def {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ let def = AttrDefId::$def_id(self.into());
+ db.attrs(def)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ let def = AttrDefId::$def_id(self.into());
+ db.attrs(def).docs()
+ }
+ fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
+ let def = AttrDefId::$def_id(self.into());
+ resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
+ }
+ }
+ )*};
+}
+
+impl_has_attrs![
+ (Field, FieldId),
+ (Variant, EnumVariantId),
+ (Static, StaticId),
+ (Const, ConstId),
+ (Trait, TraitId),
+ (TypeAlias, TypeAliasId),
+ (Macro, MacroId),
+ (Function, FunctionId),
+ (Adt, AdtId),
+ (Module, ModuleId),
+ (GenericParam, GenericParamId),
+ (Impl, ImplId),
+];
+
+macro_rules! impl_has_attrs_enum {
+ ($($variant:ident),* for $enum:ident) => {$(
+ impl HasAttrs for $variant {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ $enum::$variant(self).attrs(db)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ $enum::$variant(self).docs(db)
+ }
+ fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
+ $enum::$variant(self).resolve_doc_path(db, link, ns)
+ }
+ }
+ )*};
+}
+
+impl_has_attrs_enum![Struct, Union, Enum for Adt];
+impl_has_attrs_enum![TypeParam, ConstParam, LifetimeParam for GenericParam];
+
+impl HasAttrs for AssocItem {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ match self {
+ AssocItem::Function(it) => it.attrs(db),
+ AssocItem::Const(it) => it.attrs(db),
+ AssocItem::TypeAlias(it) => it.attrs(db),
+ }
+ }
+
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ match self {
+ AssocItem::Function(it) => it.docs(db),
+ AssocItem::Const(it) => it.docs(db),
+ AssocItem::TypeAlias(it) => it.docs(db),
+ }
+ }
+
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef> {
+ match self {
+ AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
+ }
+ }
+}
+
+fn resolve_doc_path(
+ db: &dyn HirDatabase,
+ def: AttrDefId,
+ link: &str,
+ ns: Option<Namespace>,
+) -> Option<ModuleDefId> {
+ let resolver = match def {
+ AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
+ AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
+ AttrDefId::AdtId(it) => it.resolver(db.upcast()),
+ AttrDefId::FunctionId(it) => it.resolver(db.upcast()),
+ AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()),
+ AttrDefId::StaticId(it) => it.resolver(db.upcast()),
+ AttrDefId::ConstId(it) => it.resolver(db.upcast()),
+ AttrDefId::TraitId(it) => it.resolver(db.upcast()),
+ AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
+ AttrDefId::ImplId(it) => it.resolver(db.upcast()),
+ AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
+ AttrDefId::MacroId(it) => it.resolver(db.upcast()),
+ AttrDefId::GenericParamId(it) => match it {
+ GenericParamId::TypeParamId(it) => it.parent(),
+ GenericParamId::ConstParamId(it) => it.parent(),
+ GenericParamId::LifetimeParamId(it) => it.parent,
+ }
+ .resolver(db.upcast()),
+ };
+
+ let modpath = {
+ // FIXME: this is not how we should get a mod path here
+ let ast_path = ast::SourceFile::parse(&format!("type T = {};", link))
+ .syntax_node()
+ .descendants()
+ .find_map(ast::Path::cast)?;
+ if ast_path.to_string() != link {
+ return None;
+ }
+ ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())?
+ };
+
+ let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
+ let resolved = if resolved == PerNs::none() {
+ resolver.resolve_module_path_in_trait_assoc_items(db.upcast(), &modpath)?
+ } else {
+ resolved
+ };
+ match ns {
+ Some(Namespace::Types) => resolved.take_types(),
+ Some(Namespace::Values) => resolved.take_values(),
+ Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
+ None => resolved.iter_items().next().map(|it| match it {
+ ItemInNs::Types(it) => it,
+ ItemInNs::Values(it) => it,
+ ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
+ }),
+ }
+}
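`resolve_doc_path` above turns an intra-doc link into a `ModPath` by parsing it inside a throwaway `type T = {link};` snippet and then resolving it through the owner's resolver. From the outside, clients only see the `HasAttrs` trait; a minimal sketch of that surface, assuming a `&dyn HirDatabase` handle and a `hir::Function` are already in hand (the helper itself is hypothetical):

    use hir::{HasAttrs, Namespace};

    // Hypothetical helper: resolve a doc link (e.g. `[Vec]`) found on a function's docs,
    // restricted to the type namespace. Pass `None` to accept the first match in any namespace.
    fn resolve_type_link(
        db: &dyn hir::db::HirDatabase,
        func: hir::Function,
        link: &str,
    ) -> Option<hir::ModuleDef> {
        func.resolve_doc_path(db, link, Some(Namespace::Types))
    }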
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
new file mode 100644
index 000000000..e25d86784
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -0,0 +1,16 @@
+//! Re-exports the various subcrates' databases so that the calling code can depend
+//! only on `hir`. This breaks the abstraction boundary a bit; it would be cool if
+//! we didn't do that.
+//!
+//! But we need this for at least LRU caching at the query level.
+pub use hir_def::db::*;
+pub use hir_expand::db::{
+ AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternMacroCallQuery,
+ MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, ParseMacroExpansionQuery,
+};
+pub use hir_ty::db::*;
+
+#[test]
+fn hir_database_is_object_safe() {
+ fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
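Because every sub-crate database is re-exported here, downstream crates can name all of the query traits through `hir::db` alone. A small sketch, assuming only a `&dyn HirDatabase` handle (the `crate_graph` query comes from the base-db traits that `HirDatabase` transitively inherits):

    use hir::db::HirDatabase;

    // Count the crates in the workspace without depending on hir-def/hir-ty/base-db directly.
    fn crate_count(db: &dyn HirDatabase) -> usize {
        db.crate_graph().iter().count()
    }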
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
new file mode 100644
index 000000000..6c6c11ea4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -0,0 +1,170 @@
+//! Re-export diagnostics such that clients of `hir` don't have to depend on
+//! low-level crates.
+//!
+//! This probably isn't the best way to do this -- ideally, diagnostics should
+//! be expressed in terms of hir types themselves.
+use base_db::CrateId;
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_def::path::ModPath;
+use hir_expand::{name::Name, HirFileId, InFile};
+use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange};
+
+use crate::{MacroKind, Type};
+
+macro_rules! diagnostics {
+ ($($diag:ident,)*) => {
+ pub enum AnyDiagnostic {$(
+ $diag(Box<$diag>),
+ )*}
+
+ $(
+ impl From<$diag> for AnyDiagnostic {
+ fn from(d: $diag) -> AnyDiagnostic {
+ AnyDiagnostic::$diag(Box::new(d))
+ }
+ }
+ )*
+ };
+}
+
+diagnostics![
+ BreakOutsideOfLoop,
+ InactiveCode,
+ IncorrectCase,
+ InvalidDeriveTarget,
+ MacroError,
+ MalformedDerive,
+ MismatchedArgCount,
+ MissingFields,
+ MissingMatchArms,
+ MissingUnsafe,
+ NoSuchField,
+ ReplaceFilterMapNextWithFindMap,
+ TypeMismatch,
+ UnimplementedBuiltinMacro,
+ UnresolvedExternCrate,
+ UnresolvedImport,
+ UnresolvedMacroCall,
+ UnresolvedModule,
+ UnresolvedProcMacro,
+];
+
+#[derive(Debug)]
+pub struct UnresolvedModule {
+ pub decl: InFile<AstPtr<ast::Module>>,
+ pub candidates: Box<[String]>,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedExternCrate {
+ pub decl: InFile<AstPtr<ast::ExternCrate>>,
+}
+
+#[derive(Debug)]
+pub struct UnresolvedImport {
+ pub decl: InFile<AstPtr<ast::UseTree>>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UnresolvedMacroCall {
+ pub macro_call: InFile<SyntaxNodePtr>,
+ pub precise_location: Option<TextRange>,
+ pub path: ModPath,
+ pub is_bang: bool,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct InactiveCode {
+ pub node: InFile<SyntaxNodePtr>,
+ pub cfg: CfgExpr,
+ pub opts: CfgOptions,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UnresolvedProcMacro {
+ pub node: InFile<SyntaxNodePtr>,
+ /// If the diagnostic can be pinpointed more accurately than via `node`, this is the `TextRange`
+ /// to use instead.
+ pub precise_location: Option<TextRange>,
+ pub macro_name: Option<String>,
+ pub kind: MacroKind,
+ /// The crate id of the proc-macro this macro belongs to, or `None` if the proc-macro can't be found.
+ pub krate: CrateId,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroError {
+ pub node: InFile<SyntaxNodePtr>,
+ pub precise_location: Option<TextRange>,
+ pub message: String,
+}
+
+#[derive(Debug)]
+pub struct UnimplementedBuiltinMacro {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct InvalidDeriveTarget {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct MalformedDerive {
+ pub node: InFile<SyntaxNodePtr>,
+}
+
+#[derive(Debug)]
+pub struct NoSuchField {
+ pub field: InFile<AstPtr<ast::RecordExprField>>,
+}
+
+#[derive(Debug)]
+pub struct BreakOutsideOfLoop {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
+#[derive(Debug)]
+pub struct MissingUnsafe {
+ pub expr: InFile<AstPtr<ast::Expr>>,
+}
+
+#[derive(Debug)]
+pub struct MissingFields {
+ pub file: HirFileId,
+ pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
+ pub field_list_parent_path: Option<AstPtr<ast::Path>>,
+ pub missed_fields: Vec<Name>,
+}
+
+#[derive(Debug)]
+pub struct ReplaceFilterMapNextWithFindMap {
+ pub file: HirFileId,
+ /// This expression is the whole method chain up to and including `.filter_map(..).next()`.
+ pub next_expr: AstPtr<ast::Expr>,
+}
+
+#[derive(Debug)]
+pub struct MismatchedArgCount {
+ pub call_expr: InFile<AstPtr<ast::Expr>>,
+ pub expected: usize,
+ pub found: usize,
+}
+
+#[derive(Debug)]
+pub struct MissingMatchArms {
+ pub file: HirFileId,
+ pub match_expr: AstPtr<ast::Expr>,
+ pub uncovered_patterns: String,
+}
+
+#[derive(Debug)]
+pub struct TypeMismatch {
+ // FIXME: add mismatches in patterns as well
+ pub expr: InFile<AstPtr<ast::Expr>>,
+ pub expected: Type,
+ pub actual: Type,
+}
+
+pub use hir_ty::diagnostics::IncorrectCase;
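The `diagnostics!` macro expands to one boxed `AnyDiagnostic` variant per listed type plus a `From` impl for each, so producers convert with `.into()` and consumers match on a single enum. A hedged sketch of the consuming side (the `title` helper is hypothetical, not part of the crate):

    use hir::AnyDiagnostic;

    // Hypothetical dispatch over the generated enum; real clients render richer messages.
    fn title(diag: &AnyDiagnostic) -> &'static str {
        match diag {
            AnyDiagnostic::UnresolvedModule(_) => "unresolved module",
            AnyDiagnostic::MissingFields(_) => "missing structure fields",
            AnyDiagnostic::TypeMismatch(_) => "mismatched types",
            // ...one arm per name passed to `diagnostics![..]` above.
            _ => "diagnostic",
        }
    }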
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
new file mode 100644
index 000000000..0e29c52ad
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -0,0 +1,530 @@
+//! HirDisplay implementations for various hir types.
+use hir_def::{
+ adt::VariantData,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ type_ref::{TypeBound, TypeRef},
+ AdtId, GenericDefId,
+};
+use hir_ty::{
+ display::{
+ write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
+ HirFormatter, SizedByDefault,
+ },
+ Interner, TraitRefExt, WhereClause,
+};
+use syntax::SmolStr;
+
+use crate::{
+ Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility,
+ LifetimeParam, Macro, Module, Static, Struct, Trait, TyBuilder, Type, TypeAlias,
+ TypeOrConstParam, TypeParam, Union, Variant,
+};
+
+impl HirDisplay for Function {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = f.db.function_data(self.id);
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ if data.has_default_kw() {
+ f.write_str("default ")?;
+ }
+ if data.has_const_kw() {
+ f.write_str("const ")?;
+ }
+ if data.has_async_kw() {
+ f.write_str("async ")?;
+ }
+ if self.is_unsafe_to_call(f.db) {
+ f.write_str("unsafe ")?;
+ }
+ if let Some(abi) = &data.abi {
+ // FIXME: String escape?
+ write!(f, "extern \"{}\" ", &**abi)?;
+ }
+ write!(f, "fn {}", data.name)?;
+
+ write_generic_params(GenericDefId::FunctionId(self.id), f)?;
+
+ f.write_char('(')?;
+
+ let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
+ TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
+ TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
+ {
+ f.write_char('&')?;
+ if let Some(lifetime) = lifetime {
+ write!(f, "{} ", lifetime.name)?;
+ }
+ if let hir_def::type_ref::Mutability::Mut = mut_ {
+ f.write_str("mut ")?;
+ }
+ f.write_str("self")
+ }
+ _ => {
+ f.write_str("self: ")?;
+ ty.hir_fmt(f)
+ }
+ };
+
+ let mut first = true;
+ for (name, type_ref) in &data.params {
+ if !first {
+ f.write_str(", ")?;
+ } else {
+ first = false;
+ if data.has_self_param() {
+ write_self_param(type_ref, f)?;
+ continue;
+ }
+ }
+ match name {
+ Some(name) => write!(f, "{}: ", name)?,
+ None => f.write_str("_: ")?,
+ }
+ // FIXME: Use resolved `param.ty` or raw `type_ref`?
+ // The former will ignore lifetime arguments currently.
+ type_ref.hir_fmt(f)?;
+ }
+
+ if data.is_varargs() {
+ f.write_str(", ...")?;
+ }
+
+ f.write_char(')')?;
+
+ // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
+ // Use ugly pattern match to strip the Future trait.
+ // Better way?
+ let ret_type = if !data.has_async_kw() {
+ &data.ret_type
+ } else {
+ match &*data.ret_type {
+ TypeRef::ImplTrait(bounds) => match bounds[0].as_ref() {
+ TypeBound::Path(path, _) => {
+ path.segments().iter().last().unwrap().args_and_bindings.unwrap().bindings
+ [0]
+ .type_ref
+ .as_ref()
+ .unwrap()
+ }
+ _ => panic!("Async fn ret_type should be impl Future"),
+ },
+ _ => panic!("Async fn ret_type should be impl Future"),
+ }
+ };
+
+ match ret_type {
+ TypeRef::Tuple(tup) if tup.is_empty() => {}
+ ty => {
+ f.write_str(" -> ")?;
+ ty.hir_fmt(f)?;
+ }
+ }
+
+ write_where_clause(GenericDefId::FunctionId(self.id), f)?;
+
+ Ok(())
+ }
+}
+
+impl HirDisplay for Adt {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ Adt::Struct(it) => it.hir_fmt(f),
+ Adt::Union(it) => it.hir_fmt(f),
+ Adt::Enum(it) => it.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for Struct {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("struct ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Enum {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("enum ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Union {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("union ")?;
+ write!(f, "{}", self.name(f.db))?;
+ let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Field {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
+ write!(f, "{}: ", self.name(f.db))?;
+ self.ty(f.db).hir_fmt(f)
+ }
+}
+
+impl HirDisplay for Variant {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))?;
+ let data = self.variant_data(f.db);
+ match &*data {
+ VariantData::Unit => {}
+ VariantData::Tuple(fields) => {
+ f.write_char('(')?;
+ let mut first = true;
+ for (_, field) in fields.iter() {
+ if first {
+ first = false;
+ } else {
+ f.write_str(", ")?;
+ }
+ // Enum variant fields must be pub.
+ field.type_ref.hir_fmt(f)?;
+ }
+ f.write_char(')')?;
+ }
+ VariantData::Record(fields) => {
+ f.write_str(" {")?;
+ let mut first = true;
+ for (_, field) in fields.iter() {
+ if first {
+ first = false;
+ f.write_char(' ')?;
+ } else {
+ f.write_str(", ")?;
+ }
+ // Enum variant fields must be pub.
+ write!(f, "{}: ", field.name)?;
+ field.type_ref.hir_fmt(f)?;
+ }
+ f.write_str(" }")?;
+ }
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for Type {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ self.ty.hir_fmt(f)
+ }
+}
+
+impl HirDisplay for GenericParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self {
+ GenericParam::TypeParam(it) => it.hir_fmt(f),
+ GenericParam::ConstParam(it) => it.hir_fmt(f),
+ GenericParam::LifetimeParam(it) => it.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for TypeOrConstParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.split(f.db) {
+ either::Either::Left(x) => x.hir_fmt(f),
+ either::Either::Right(x) => x.hir_fmt(f),
+ }
+ }
+}
+
+impl HirDisplay for TypeParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))?;
+ if f.omit_verbose_types() {
+ return Ok(());
+ }
+
+ let bounds = f.db.generic_predicates_for_param(self.id.parent(), self.id.into(), None);
+ let substs = TyBuilder::placeholder_subst(f.db, self.id.parent());
+ let predicates: Vec<_> =
+ bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect();
+ let krate = self.id.parent().krate(f.db).id;
+ let sized_trait =
+ f.db.lang_item(krate, SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
+ WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait,
+ _ => false,
+ });
+ let has_only_not_sized_bound = predicates.is_empty();
+ if !has_only_sized_bound || has_only_not_sized_bound {
+ let default_sized = SizedByDefault::Sized { anchor: krate };
+ write_bounds_like_dyn_trait_with_prefix(":", &predicates, default_sized, f)?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for LifetimeParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "{}", self.name(f.db))
+ }
+}
+
+impl HirDisplay for ConstParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write!(f, "const {}: ", self.name(f.db))?;
+ self.ty(f.db).hir_fmt(f)
+ }
+}
+
+fn write_generic_params(
+ def: GenericDefId,
+ f: &mut HirFormatter<'_>,
+) -> Result<(), HirDisplayError> {
+ let params = f.db.generic_params(def);
+ if params.lifetimes.is_empty()
+ && params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
+ && params
+ .type_or_consts
+ .iter()
+ .filter_map(|x| x.1.type_param())
+ .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
+ {
+ return Ok(());
+ }
+ f.write_char('<')?;
+
+ let mut first = true;
+ let mut delim = |f: &mut HirFormatter<'_>| {
+ if first {
+ first = false;
+ Ok(())
+ } else {
+ f.write_str(", ")
+ }
+ };
+ for (_, lifetime) in params.lifetimes.iter() {
+ delim(f)?;
+ write!(f, "{}", lifetime.name)?;
+ }
+ for (_, ty) in params.type_or_consts.iter() {
+ if let Some(name) = &ty.name() {
+ match ty {
+ TypeOrConstParamData::TypeParamData(ty) => {
+ if ty.provenance != TypeParamProvenance::TypeParamList {
+ continue;
+ }
+ delim(f)?;
+ write!(f, "{}", name)?;
+ if let Some(default) = &ty.default {
+ f.write_str(" = ")?;
+ default.hir_fmt(f)?;
+ }
+ }
+ TypeOrConstParamData::ConstParamData(c) => {
+ delim(f)?;
+ write!(f, "const {}: ", name)?;
+ c.ty.hir_fmt(f)?;
+ }
+ }
+ }
+ }
+
+ f.write_char('>')?;
+ Ok(())
+}
+
+fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let params = f.db.generic_params(def);
+
+ // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
+ let is_unnamed_type_target = |target: &WherePredicateTypeTarget| match target {
+ WherePredicateTypeTarget::TypeRef(_) => false,
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ params.type_or_consts[*id].name().is_none()
+ }
+ };
+
+ let has_displayable_predicate = params
+ .where_predicates
+ .iter()
+ .any(|pred| {
+ !matches!(pred, WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target))
+ });
+
+ if !has_displayable_predicate {
+ return Ok(());
+ }
+
+ let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
+ WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ match &params.type_or_consts[*id].name() {
+ Some(name) => write!(f, "{}", name),
+ None => f.write_str("{unnamed}"),
+ }
+ }
+ };
+
+ f.write_str("\nwhere")?;
+
+ for (pred_idx, pred) in params.where_predicates.iter().enumerate() {
+ let prev_pred =
+ if pred_idx == 0 { None } else { Some(&params.where_predicates[pred_idx - 1]) };
+
+ let new_predicate = |f: &mut HirFormatter<'_>| {
+ f.write_str(if pred_idx == 0 { "\n " } else { ",\n " })
+ };
+
+ match pred {
+ WherePredicate::TypeBound { target, .. } if is_unnamed_type_target(target) => {}
+ WherePredicate::TypeBound { target, bound } => {
+ if matches!(prev_pred, Some(WherePredicate::TypeBound { target: target_, .. }) if target_ == target)
+ {
+ f.write_str(" + ")?;
+ } else {
+ new_predicate(f)?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
+ }
+ bound.hir_fmt(f)?;
+ }
+ WherePredicate::Lifetime { target, bound } => {
+ if matches!(prev_pred, Some(WherePredicate::Lifetime { target: target_, .. }) if target_ == target)
+ {
+ write!(f, " + {}", bound.name)?;
+ } else {
+ new_predicate(f)?;
+ write!(f, "{}: {}", target.name, bound.name)?;
+ }
+ }
+ WherePredicate::ForLifetime { lifetimes, target, bound } => {
+ if matches!(
+ prev_pred,
+ Some(WherePredicate::ForLifetime { lifetimes: lifetimes_, target: target_, .. })
+ if lifetimes_ == lifetimes && target_ == target,
+ ) {
+ f.write_str(" + ")?;
+ } else {
+ new_predicate(f)?;
+ f.write_str("for<")?;
+ for (idx, lifetime) in lifetimes.iter().enumerate() {
+ if idx != 0 {
+ f.write_str(", ")?;
+ }
+ write!(f, "{}", lifetime)?;
+ }
+ f.write_str("> ")?;
+ write_target(target, f)?;
+ f.write_str(": ")?;
+ }
+ bound.hir_fmt(f)?;
+ }
+ }
+ }
+
+ // End of final predicate. There must be at least one predicate here.
+ f.write_char(',')?;
+
+ Ok(())
+}
+
+impl HirDisplay for Const {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.const_data(self.id);
+ f.write_str("const ")?;
+ match &data.name {
+ Some(name) => write!(f, "{}: ", name)?,
+ None => f.write_str("_: ")?,
+ }
+ data.type_ref.hir_fmt(f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Static {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.static_data(self.id);
+ f.write_str("static ")?;
+ if data.mutable {
+ f.write_str("mut ")?;
+ }
+ write!(f, "{}: ", &data.name)?;
+ data.type_ref.hir_fmt(f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Trait {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.trait_data(self.id);
+ if data.is_unsafe {
+ f.write_str("unsafe ")?;
+ }
+ if data.is_auto {
+ f.write_str("auto ")?;
+ }
+ write!(f, "trait {}", data.name)?;
+ let def_id = GenericDefId::TraitId(self.id);
+ write_generic_params(def_id, f)?;
+ write_where_clause(def_id, f)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for TypeAlias {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ let data = f.db.type_alias_data(self.id);
+ write!(f, "type {}", data.name)?;
+ if !data.bounds.is_empty() {
+ f.write_str(": ")?;
+ f.write_joined(&data.bounds, " + ")?;
+ }
+ if let Some(ty) = &data.type_ref {
+ f.write_str(" = ")?;
+ ty.hir_fmt(f)?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for Module {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ // FIXME: Module doesn't have visibility saved in data.
+ match self.name(f.db) {
+ Some(name) => write!(f, "mod {}", name),
+ None if self.is_crate_root(f.db) => match self.krate(f.db).display_name(f.db) {
+ Some(name) => write!(f, "extern crate {}", name),
+ None => f.write_str("extern crate {unknown}"),
+ },
+ None => f.write_str("mod {unnamed}"),
+ }
+ }
+}
+
+impl HirDisplay for Macro {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ match self.id {
+ hir_def::MacroId::Macro2Id(_) => f.write_str("macro"),
+ hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
+ hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
+ }?;
+ write!(f, " {}", self.name(f.db))
+ }
+}
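These impls are driven through the `HirDisplay::display` adapter re-exported from `hir_ty::display`, which yields a value implementing `std::fmt::Display`. A sketch, assuming `db` and a `hir::Function` were obtained elsewhere; the exact rendered text follows the `write_generic_params`/`write_where_clause` logic above:

    use hir::HirDisplay;

    fn render_signature(db: &dyn hir::db::HirDatabase, func: hir::Function) -> String {
        // Produces, for example, something like `pub fn frobnicate<T>(x: T) -> T`
        // followed by a `where` clause on its own lines, per the formatting code above.
        func.display(db).to_string()
    }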
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
new file mode 100644
index 000000000..9c7558d19
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -0,0 +1,293 @@
+//! Utility module for converting between hir_def ids and code_model wrappers.
+//!
+//! It's unclear if we need this long-term, but it's definitely useful while we
+//! are splitting the hir.
+
+use hir_def::{
+ expr::{LabelId, PatId},
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, GenericDefId, GenericParamId,
+ ModuleDefId, VariantId,
+};
+
+use crate::{
+ Adt, AssocItem, BuiltinType, DefWithBody, Field, GenericDef, GenericParam, ItemInNs, Label,
+ Local, ModuleDef, Variant, VariantDef,
+};
+
+macro_rules! from_id {
+ ($(($id:path, $ty:path)),*) => {$(
+ impl From<$id> for $ty {
+ fn from(id: $id) -> $ty {
+ $ty { id }
+ }
+ }
+ impl From<$ty> for $id {
+ fn from(ty: $ty) -> $id {
+ ty.id
+ }
+ }
+ )*}
+}
+
+from_id![
+ (base_db::CrateId, crate::Crate),
+ (hir_def::ModuleId, crate::Module),
+ (hir_def::StructId, crate::Struct),
+ (hir_def::UnionId, crate::Union),
+ (hir_def::EnumId, crate::Enum),
+ (hir_def::TypeAliasId, crate::TypeAlias),
+ (hir_def::TraitId, crate::Trait),
+ (hir_def::StaticId, crate::Static),
+ (hir_def::ConstId, crate::Const),
+ (hir_def::FunctionId, crate::Function),
+ (hir_def::ImplId, crate::Impl),
+ (hir_def::TypeOrConstParamId, crate::TypeOrConstParam),
+ (hir_def::TypeParamId, crate::TypeParam),
+ (hir_def::ConstParamId, crate::ConstParam),
+ (hir_def::LifetimeParamId, crate::LifetimeParam),
+ (hir_def::MacroId, crate::Macro)
+];
+
+impl From<AdtId> for Adt {
+ fn from(id: AdtId) -> Self {
+ match id {
+ AdtId::StructId(it) => Adt::Struct(it.into()),
+ AdtId::UnionId(it) => Adt::Union(it.into()),
+ AdtId::EnumId(it) => Adt::Enum(it.into()),
+ }
+ }
+}
+
+impl From<Adt> for AdtId {
+ fn from(id: Adt) -> Self {
+ match id {
+ Adt::Struct(it) => AdtId::StructId(it.id),
+ Adt::Union(it) => AdtId::UnionId(it.id),
+ Adt::Enum(it) => AdtId::EnumId(it.id),
+ }
+ }
+}
+
+impl From<GenericParamId> for GenericParam {
+ fn from(id: GenericParamId) -> Self {
+ match id {
+ GenericParamId::TypeParamId(it) => GenericParam::TypeParam(it.into()),
+ GenericParamId::ConstParamId(it) => GenericParam::ConstParam(it.into()),
+ GenericParamId::LifetimeParamId(it) => GenericParam::LifetimeParam(it.into()),
+ }
+ }
+}
+
+impl From<GenericParam> for GenericParamId {
+ fn from(id: GenericParam) -> Self {
+ match id {
+ GenericParam::LifetimeParam(it) => GenericParamId::LifetimeParamId(it.id),
+ GenericParam::ConstParam(it) => GenericParamId::ConstParamId(it.id),
+ GenericParam::TypeParam(it) => GenericParamId::TypeParamId(it.id),
+ }
+ }
+}
+
+impl From<EnumVariantId> for Variant {
+ fn from(id: EnumVariantId) -> Self {
+ Variant { parent: id.parent.into(), id: id.local_id }
+ }
+}
+
+impl From<Variant> for EnumVariantId {
+ fn from(def: Variant) -> Self {
+ EnumVariantId { parent: def.parent.id, local_id: def.id }
+ }
+}
+
+impl From<ModuleDefId> for ModuleDef {
+ fn from(id: ModuleDefId) -> Self {
+ match id {
+ ModuleDefId::ModuleId(it) => ModuleDef::Module(it.into()),
+ ModuleDefId::FunctionId(it) => ModuleDef::Function(it.into()),
+ ModuleDefId::AdtId(it) => ModuleDef::Adt(it.into()),
+ ModuleDefId::EnumVariantId(it) => ModuleDef::Variant(it.into()),
+ ModuleDefId::ConstId(it) => ModuleDef::Const(it.into()),
+ ModuleDefId::StaticId(it) => ModuleDef::Static(it.into()),
+ ModuleDefId::TraitId(it) => ModuleDef::Trait(it.into()),
+ ModuleDefId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
+ ModuleDefId::BuiltinType(it) => ModuleDef::BuiltinType(it.into()),
+ ModuleDefId::MacroId(it) => ModuleDef::Macro(it.into()),
+ }
+ }
+}
+
+impl From<ModuleDef> for ModuleDefId {
+ fn from(id: ModuleDef) -> Self {
+ match id {
+ ModuleDef::Module(it) => ModuleDefId::ModuleId(it.into()),
+ ModuleDef::Function(it) => ModuleDefId::FunctionId(it.into()),
+ ModuleDef::Adt(it) => ModuleDefId::AdtId(it.into()),
+ ModuleDef::Variant(it) => ModuleDefId::EnumVariantId(it.into()),
+ ModuleDef::Const(it) => ModuleDefId::ConstId(it.into()),
+ ModuleDef::Static(it) => ModuleDefId::StaticId(it.into()),
+ ModuleDef::Trait(it) => ModuleDefId::TraitId(it.into()),
+ ModuleDef::TypeAlias(it) => ModuleDefId::TypeAliasId(it.into()),
+ ModuleDef::BuiltinType(it) => ModuleDefId::BuiltinType(it.into()),
+ ModuleDef::Macro(it) => ModuleDefId::MacroId(it.into()),
+ }
+ }
+}
+
+impl From<DefWithBody> for DefWithBodyId {
+ fn from(def: DefWithBody) -> Self {
+ match def {
+ DefWithBody::Function(it) => DefWithBodyId::FunctionId(it.id),
+ DefWithBody::Static(it) => DefWithBodyId::StaticId(it.id),
+ DefWithBody::Const(it) => DefWithBodyId::ConstId(it.id),
+ }
+ }
+}
+
+impl From<DefWithBodyId> for DefWithBody {
+ fn from(def: DefWithBodyId) -> Self {
+ match def {
+ DefWithBodyId::FunctionId(it) => DefWithBody::Function(it.into()),
+ DefWithBodyId::StaticId(it) => DefWithBody::Static(it.into()),
+ DefWithBodyId::ConstId(it) => DefWithBody::Const(it.into()),
+ }
+ }
+}
+
+impl From<AssocItemId> for AssocItem {
+ fn from(def: AssocItemId) -> Self {
+ match def {
+ AssocItemId::FunctionId(it) => AssocItem::Function(it.into()),
+ AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()),
+ AssocItemId::ConstId(it) => AssocItem::Const(it.into()),
+ }
+ }
+}
+
+impl From<GenericDef> for GenericDefId {
+ fn from(def: GenericDef) -> Self {
+ match def {
+ GenericDef::Function(it) => GenericDefId::FunctionId(it.id),
+ GenericDef::Adt(it) => GenericDefId::AdtId(it.into()),
+ GenericDef::Trait(it) => GenericDefId::TraitId(it.id),
+ GenericDef::TypeAlias(it) => GenericDefId::TypeAliasId(it.id),
+ GenericDef::Impl(it) => GenericDefId::ImplId(it.id),
+ GenericDef::Variant(it) => {
+ GenericDefId::EnumVariantId(EnumVariantId { parent: it.parent.id, local_id: it.id })
+ }
+ GenericDef::Const(it) => GenericDefId::ConstId(it.id),
+ }
+ }
+}
+
+impl From<GenericDefId> for GenericDef {
+ fn from(def: GenericDefId) -> Self {
+ match def {
+ GenericDefId::FunctionId(it) => GenericDef::Function(it.into()),
+ GenericDefId::AdtId(it) => GenericDef::Adt(it.into()),
+ GenericDefId::TraitId(it) => GenericDef::Trait(it.into()),
+ GenericDefId::TypeAliasId(it) => GenericDef::TypeAlias(it.into()),
+ GenericDefId::ImplId(it) => GenericDef::Impl(it.into()),
+ GenericDefId::EnumVariantId(it) => {
+ GenericDef::Variant(Variant { parent: it.parent.into(), id: it.local_id })
+ }
+ GenericDefId::ConstId(it) => GenericDef::Const(it.into()),
+ }
+ }
+}
+
+impl From<Adt> for GenericDefId {
+ fn from(id: Adt) -> Self {
+ match id {
+ Adt::Struct(it) => it.id.into(),
+ Adt::Union(it) => it.id.into(),
+ Adt::Enum(it) => it.id.into(),
+ }
+ }
+}
+
+impl From<VariantId> for VariantDef {
+ fn from(def: VariantId) -> Self {
+ match def {
+ VariantId::StructId(it) => VariantDef::Struct(it.into()),
+ VariantId::EnumVariantId(it) => VariantDef::Variant(it.into()),
+ VariantId::UnionId(it) => VariantDef::Union(it.into()),
+ }
+ }
+}
+
+impl From<VariantDef> for VariantId {
+ fn from(def: VariantDef) -> Self {
+ match def {
+ VariantDef::Struct(it) => VariantId::StructId(it.id),
+ VariantDef::Variant(it) => VariantId::EnumVariantId(it.into()),
+ VariantDef::Union(it) => VariantId::UnionId(it.id),
+ }
+ }
+}
+
+impl From<Field> for FieldId {
+ fn from(def: Field) -> Self {
+ FieldId { parent: def.parent.into(), local_id: def.id }
+ }
+}
+
+impl From<FieldId> for Field {
+ fn from(def: FieldId) -> Self {
+ Field { parent: def.parent.into(), id: def.local_id }
+ }
+}
+
+impl From<AssocItem> for GenericDefId {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(f) => f.id.into(),
+ AssocItem::Const(c) => c.id.into(),
+ AssocItem::TypeAlias(t) => t.id.into(),
+ }
+ }
+}
+
+impl From<(DefWithBodyId, PatId)> for Local {
+ fn from((parent, pat_id): (DefWithBodyId, PatId)) -> Self {
+ Local { parent, pat_id }
+ }
+}
+
+impl From<(DefWithBodyId, LabelId)> for Label {
+ fn from((parent, label_id): (DefWithBodyId, LabelId)) -> Self {
+ Label { parent, label_id }
+ }
+}
+
+impl From<hir_def::item_scope::ItemInNs> for ItemInNs {
+ fn from(it: hir_def::item_scope::ItemInNs) -> Self {
+ match it {
+ hir_def::item_scope::ItemInNs::Types(it) => ItemInNs::Types(it.into()),
+ hir_def::item_scope::ItemInNs::Values(it) => ItemInNs::Values(it.into()),
+ hir_def::item_scope::ItemInNs::Macros(it) => ItemInNs::Macros(it.into()),
+ }
+ }
+}
+
+impl From<ItemInNs> for hir_def::item_scope::ItemInNs {
+ fn from(it: ItemInNs) -> Self {
+ match it {
+ ItemInNs::Types(it) => Self::Types(it.into()),
+ ItemInNs::Values(it) => Self::Values(it.into()),
+ ItemInNs::Macros(it) => Self::Macros(it.into()),
+ }
+ }
+}
+
+impl From<hir_def::builtin_type::BuiltinType> for BuiltinType {
+ fn from(inner: hir_def::builtin_type::BuiltinType) -> Self {
+ Self { inner }
+ }
+}
+
+impl From<BuiltinType> for hir_def::builtin_type::BuiltinType {
+ fn from(it: BuiltinType) -> Self {
+ it.inner
+ }
+}
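The `from_id!` macro above simply pairs each raw `hir_def` id with its public wrapper and generates `From` in both directions. Spelled out for one pair, the expansion inside this crate is roughly:

    // Roughly what `from_id![(hir_def::ModuleId, crate::Module)]` generates.
    impl From<hir_def::ModuleId> for crate::Module {
        fn from(id: hir_def::ModuleId) -> crate::Module {
            crate::Module { id }
        }
    }
    impl From<crate::Module> for hir_def::ModuleId {
        fn from(ty: crate::Module) -> hir_def::ModuleId {
            ty.id
        }
    }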
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
new file mode 100644
index 000000000..f8b01db3e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -0,0 +1,174 @@
+//! Provides a set of implementations for hir objects that allow getting back to their source location in a file.
+
+use either::Either;
+use hir_def::{
+ nameres::{ModuleOrigin, ModuleSource},
+ src::{HasChildSource, HasSource as _},
+ Lookup, MacroId, VariantId,
+};
+use hir_expand::InFile;
+use syntax::ast;
+
+use crate::{
+ db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam, Macro,
+ Module, Static, Struct, Trait, TypeAlias, TypeOrConstParam, Union, Variant,
+};
+
+pub trait HasSource {
+ type Ast;
+ /// Fetches the definition's source node.
+ /// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`],
+ /// as that caches the parsed file in the semantics' cache.
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
+}
+
+/// NB: Module is !HasSource, because it has two source nodes at the same time:
+/// definition and declaration.
+impl Module {
+ /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
+ pub fn definition_source(self, db: &dyn HirDatabase) -> InFile<ModuleSource> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].definition_source(db.upcast())
+ }
+
+ pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
+ let def_map = self.id.def_map(db.upcast());
+ match def_map[self.id.local_id].origin {
+ ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs,
+ _ => false,
+ }
+ }
+
+ pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].origin.is_inline()
+ }
+
+ /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
+ /// `None` for the crate root.
+ pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].declaration_source(db.upcast())
+ }
+}
+
+impl HasSource for Field {
+ type Ast = FieldSource;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let var = VariantId::from(self.parent);
+ let src = var.child_source(db.upcast());
+ let field_source = src.map(|it| match it[self.id].clone() {
+ Either::Left(it) => FieldSource::Pos(it),
+ Either::Right(it) => FieldSource::Named(it),
+ });
+ Some(field_source)
+ }
+}
+impl HasSource for Adt {
+ type Ast = ast::Adt;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self {
+ Adt::Struct(s) => Some(s.source(db)?.map(ast::Adt::Struct)),
+ Adt::Union(u) => Some(u.source(db)?.map(ast::Adt::Union)),
+ Adt::Enum(e) => Some(e.source(db)?.map(ast::Adt::Enum)),
+ }
+ }
+}
+impl HasSource for Struct {
+ type Ast = ast::Struct;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Union {
+ type Ast = ast::Union;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Enum {
+ type Ast = ast::Enum;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Variant {
+ type Ast = ast::Variant;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Variant>> {
+ Some(self.parent.id.child_source(db.upcast()).map(|map| map[self.id].clone()))
+ }
+}
+impl HasSource for Function {
+ type Ast = ast::Fn;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Const {
+ type Ast = ast::Const;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Static {
+ type Ast = ast::Static;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Trait {
+ type Ast = ast::Trait;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for TypeAlias {
+ type Ast = ast::TypeAlias;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+impl HasSource for Macro {
+ type Ast = Either<ast::Macro, ast::Fn>;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ match self.id {
+ MacroId::Macro2Id(it) => Some(
+ it.lookup(db.upcast())
+ .source(db.upcast())
+ .map(ast::Macro::MacroDef)
+ .map(Either::Left),
+ ),
+ MacroId::MacroRulesId(it) => Some(
+ it.lookup(db.upcast())
+ .source(db.upcast())
+ .map(ast::Macro::MacroRules)
+ .map(Either::Left),
+ ),
+ MacroId::ProcMacroId(it) => {
+ Some(it.lookup(db.upcast()).source(db.upcast()).map(Either::Right))
+ }
+ }
+ }
+}
+impl HasSource for Impl {
+ type Ast = ast::Impl;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
+
+impl HasSource for TypeOrConstParam {
+ type Ast = Either<ast::TypeOrConstParam, ast::Trait>;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let child_source = self.id.parent.child_source(db.upcast());
+ Some(child_source.map(|it| it[self.id.local_id].clone()))
+ }
+}
+
+impl HasSource for LifetimeParam {
+ type Ast = ast::LifetimeParam;
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ let child_source = self.id.parent.child_source(db.upcast());
+ Some(child_source.map(|it| it[self.id.local_id].clone()))
+ }
+}
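A typical consumer uses `HasSource::source` to get back from a `hir` item to its `InFile`-wrapped AST node and then works with the syntax tree directly. A minimal sketch, assuming the caller also depends on the `syntax` crate and already has `db` and a `hir::Struct`:

    use hir::HasSource;
    use syntax::ast::HasName;

    fn struct_name_from_source(db: &dyn hir::db::HirDatabase, strukt: hir::Struct) -> Option<String> {
        // `source` returns `InFile<ast::Struct>`; `.value` is the AST node itself, while
        // `.file_id` (unused here) records which file or macro expansion it came from.
        let src = strukt.source(db)?;
        src.value.name().map(|name| name.to_string())
    }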
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
new file mode 100644
index 000000000..8f984210e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -0,0 +1,3639 @@
+//! HIR (previously known as descriptors) provides a high-level object oriented
+//! access to Rust code.
+//!
+//! The principal difference between HIR and syntax trees is that HIR is bound
+//! to a particular crate instance. That is, it has cfg flags and features
+//! applied. So, the relation between syntax and HIR is many-to-one.
+//!
+//! HIR is the public API of all of the compiler logic above syntax trees.
+//! It is written in "OO" style. Each type is self-contained (as in, it knows its
+//! parents and full context). It should be "clean code".
+//!
+//! `hir_*` crates are the implementation of the compiler logic.
+//! They are written in "ECS" style, with relatively few abstractions.
+//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
+//!
+//! `hir` is what insulates the "we don't know how to actually write an incremental compiler"
+//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
+//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![recursion_limit = "512"]
+
+mod semantics;
+mod source_analyzer;
+
+mod from_id;
+mod attrs;
+mod has_source;
+
+pub mod diagnostics;
+pub mod db;
+pub mod symbols;
+
+mod display;
+
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
+use either::Either;
+use hir_def::{
+ adt::{ReprKind, VariantData},
+ body::{BodyDiagnostic, SyntheticSyntax},
+ expr::{BindingAnnotation, LabelId, Pat, PatId},
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ item_tree::ItemTreeNode,
+ lang_item::LangItemTarget,
+ nameres::{self, diagnostics::DefDiagnostic},
+ per_ns::PerNs,
+ resolver::{HasResolver, Resolver},
+ src::HasSource as _,
+ AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+};
+use hir_expand::{name::name, MacroCallKind};
+use hir_ty::{
+ all_super_traits, autoderef,
+ consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
+ diagnostics::BodyValidationDiagnostic,
+ method_resolution::{self, TyFingerprint},
+ primitive::UintTy,
+ subst_prefix,
+ traits::FnTrait,
+ AliasEq, AliasTy, BoundVar, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast,
+ ClosureId, DebruijnIndex, GenericArgData, InEnvironment, Interner, ParamKind,
+ QuantifiedWhereClause, Scalar, Solution, Substitution, TraitEnvironment, TraitRefExt, Ty,
+ TyBuilder, TyDefId, TyExt, TyKind, TyVariableKind, WhereClause,
+};
+use itertools::Itertools;
+use nameres::diagnostics::DefDiagnosticKind;
+use once_cell::unsync::Lazy;
+use rustc_hash::FxHashSet;
+use stdx::{format_to, impl_from, never};
+use syntax::{
+ ast::{self, HasAttrs as _, HasDocComments, HasName},
+ AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
+};
+
+use crate::db::{DefDatabase, HirDatabase};
+
+pub use crate::{
+ attrs::{HasAttrs, Namespace},
+ diagnostics::{
+ AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
+ MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
+ MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch,
+ UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall,
+ UnresolvedModule, UnresolvedProcMacro,
+ },
+ has_source::HasSource,
+ semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+};
+
+// Be careful with these re-exports.
+//
+// `hir` is the boundary between the compiler and the IDE. It should try hard to
+// isolate the compiler from the ide, to allow the two to be refactored
+// independently. Re-exporting something from the compiler is the sure way to
+// breach the boundary.
+//
+// Generally, a refactoring which *removes* a name from this list is a good
+// idea!
+pub use {
+ cfg::{CfgAtom, CfgExpr, CfgOptions},
+ hir_def::{
+ adt::StructKind,
+ attr::{Attr, Attrs, AttrsWithOwner, Documentation},
+ builtin_attr::AttributeTemplate,
+ find_path::PrefixKind,
+ import_map,
+ nameres::ModuleSource,
+ path::{ModPath, PathKind},
+ type_ref::{Mutability, TypeRef},
+ visibility::Visibility,
+ },
+ hir_expand::{
+ name::{known, Name},
+ ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ },
+ hir_ty::display::HirDisplay,
+};
+
+// These are negative re-exports: pub using these names is forbidden, they
+// should remain private to hir internals.
+#[allow(unused)]
+use {
+ hir_def::path::Path,
+ hir_expand::{hygiene::Hygiene, name::AsName},
+};
+
+/// hir::Crate describes a single crate. It's the main interface with which
+/// a crate's dependencies interact. Mostly, it should be just a proxy for the
+/// root module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Crate {
+ pub(crate) id: CrateId,
+}
+
+#[derive(Debug)]
+pub struct CrateDependency {
+ pub krate: Crate,
+ pub name: Name,
+}
+
+impl Crate {
+ pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin {
+ db.crate_graph()[self.id].origin.clone()
+ }
+
+ pub fn is_builtin(self, db: &dyn HirDatabase) -> bool {
+ matches!(self.origin(db), CrateOrigin::Lang(_))
+ }
+
+ pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
+ db.crate_graph()[self.id]
+ .dependencies
+ .iter()
+ .map(|dep| {
+ let krate = Crate { id: dep.crate_id };
+ let name = dep.as_name();
+ CrateDependency { krate, name }
+ })
+ .collect()
+ }
+
+ pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
+ let crate_graph = db.crate_graph();
+ crate_graph
+ .iter()
+ .filter(|&krate| {
+ crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
+ })
+ .map(|id| Crate { id })
+ .collect()
+ }
+
+ pub fn transitive_reverse_dependencies(
+ self,
+ db: &dyn HirDatabase,
+ ) -> impl Iterator<Item = Crate> {
+ db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
+ }
+
+ pub fn root_module(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id);
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let def_map = db.crate_def_map(self.id);
+ def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
+ }
+
+ pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
+ db.crate_graph()[self.id].root_file_id
+ }
+
+ pub fn edition(self, db: &dyn HirDatabase) -> Edition {
+ db.crate_graph()[self.id].edition
+ }
+
+ pub fn version(self, db: &dyn HirDatabase) -> Option<String> {
+ db.crate_graph()[self.id].version.clone()
+ }
+
+ pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateDisplayName> {
+ db.crate_graph()[self.id].display_name.clone()
+ }
+
+ pub fn query_external_importables(
+ self,
+ db: &dyn DefDatabase,
+ query: import_map::Query,
+ ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
+ let _p = profile::span("query_external_importables");
+ import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+ match ItemInNs::from(item) {
+ ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
+ ItemInNs::Macros(mac_id) => Either::Right(mac_id),
+ }
+ })
+ }
+
+ pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
+ db.crate_graph().iter().map(|id| Crate { id }).collect()
+ }
+
+ /// Try to get the root URL of the documentation of a crate.
+ pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
+ // Look for #![doc(html_root_url = "...")]
+ let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
+ let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
+ doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+ }
+
+ pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].cfg_options.clone()
+ }
+
+ pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].potential_cfg_options.clone()
+ }
+}
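The `Crate` methods above are the usual entry point for walking a workspace. A sketch of an external caller enumerating crates and their root modules, assuming a `&dyn HirDatabase` from the analysis host:

    fn all_root_modules(db: &dyn hir::db::HirDatabase) -> Vec<hir::Module> {
        // `Crate::all` iterates the crate graph; `root_module` resolves each crate's DefMap root.
        hir::Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect()
    }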
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) id: ModuleId,
+}
+
+/// The defs which can be visible in the module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ModuleDef {
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ // Can't be directly declared, but can be imported.
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ Macro(Macro),
+}
+impl_from!(
+ Module,
+ Function,
+ Adt(Struct, Enum, Union),
+ Variant,
+ Const,
+ Static,
+ Trait,
+ TypeAlias,
+ BuiltinType,
+ Macro
+ for ModuleDef
+);
+
+impl From<VariantDef> for ModuleDef {
+ fn from(var: VariantDef) -> Self {
+ match var {
+ VariantDef::Struct(t) => Adt::from(t).into(),
+ VariantDef::Union(t) => Adt::from(t).into(),
+ VariantDef::Variant(t) => t.into(),
+ }
+ }
+}
+
+impl ModuleDef {
+ pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
+ match self {
+ ModuleDef::Module(it) => it.parent(db),
+ ModuleDef::Function(it) => Some(it.module(db)),
+ ModuleDef::Adt(it) => Some(it.module(db)),
+ ModuleDef::Variant(it) => Some(it.module(db)),
+ ModuleDef::Const(it) => Some(it.module(db)),
+ ModuleDef::Static(it) => Some(it.module(db)),
+ ModuleDef::Trait(it) => Some(it.module(db)),
+ ModuleDef::TypeAlias(it) => Some(it.module(db)),
+ ModuleDef::Macro(it) => Some(it.module(db)),
+ ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
+ let mut segments = vec![self.name(db)?];
+ for m in self.module(db)?.path_to_root(db) {
+ segments.extend(m.name(db))
+ }
+ segments.reverse();
+ Some(segments.into_iter().join("::"))
+ }
+
+ pub fn canonical_module_path(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Option<impl Iterator<Item = Module>> {
+ self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let name = match self {
+ ModuleDef::Module(it) => it.name(db)?,
+ ModuleDef::Const(it) => it.name(db)?,
+ ModuleDef::Adt(it) => it.name(db),
+ ModuleDef::Trait(it) => it.name(db),
+ ModuleDef::Function(it) => it.name(db),
+ ModuleDef::Variant(it) => it.name(db),
+ ModuleDef::TypeAlias(it) => it.name(db),
+ ModuleDef::Static(it) => it.name(db),
+ ModuleDef::Macro(it) => it.name(db),
+ ModuleDef::BuiltinType(it) => it.name(),
+ };
+ Some(name)
+ }
+
+ pub fn diagnostics(self, db: &dyn HirDatabase) -> Vec<AnyDiagnostic> {
+ let id = match self {
+ ModuleDef::Adt(it) => match it {
+ Adt::Struct(it) => it.id.into(),
+ Adt::Enum(it) => it.id.into(),
+ Adt::Union(it) => it.id.into(),
+ },
+ ModuleDef::Trait(it) => it.id.into(),
+ ModuleDef::Function(it) => it.id.into(),
+ ModuleDef::TypeAlias(it) => it.id.into(),
+ ModuleDef::Module(it) => it.id.into(),
+ ModuleDef::Const(it) => it.id.into(),
+ ModuleDef::Static(it) => it.id.into(),
+ _ => return Vec::new(),
+ };
+
+ let module = match self.module(db) {
+ Some(it) => it,
+ None => return Vec::new(),
+ };
+
+ let mut acc = Vec::new();
+
+ match self.as_def_with_body() {
+ Some(def) => {
+ def.diagnostics(db, &mut acc);
+ }
+ None => {
+ for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
+ acc.push(diag.into())
+ }
+ }
+ }
+
+ acc
+ }
+
+ pub fn as_def_with_body(self) -> Option<DefWithBody> {
+ match self {
+ ModuleDef::Function(it) => Some(it.into()),
+ ModuleDef::Const(it) => Some(it.into()),
+ ModuleDef::Static(it) => Some(it.into()),
+
+ ModuleDef::Module(_)
+ | ModuleDef::Adt(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Trait(_)
+ | ModuleDef::TypeAlias(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ Some(match self {
+ ModuleDef::Module(it) => it.attrs(db),
+ ModuleDef::Function(it) => it.attrs(db),
+ ModuleDef::Adt(it) => it.attrs(db),
+ ModuleDef::Variant(it) => it.attrs(db),
+ ModuleDef::Const(it) => it.attrs(db),
+ ModuleDef::Static(it) => it.attrs(db),
+ ModuleDef::Trait(it) => it.attrs(db),
+ ModuleDef::TypeAlias(it) => it.attrs(db),
+ ModuleDef::Macro(it) => it.attrs(db),
+ ModuleDef::BuiltinType(_) => return None,
+ })
+ }
+}
+
+impl HasVisibility for ModuleDef {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match *self {
+ ModuleDef::Module(it) => it.visibility(db),
+ ModuleDef::Function(it) => it.visibility(db),
+ ModuleDef::Adt(it) => it.visibility(db),
+ ModuleDef::Const(it) => it.visibility(db),
+ ModuleDef::Static(it) => it.visibility(db),
+ ModuleDef::Trait(it) => it.visibility(db),
+ ModuleDef::TypeAlias(it) => it.visibility(db),
+ ModuleDef::Variant(it) => it.visibility(db),
+ ModuleDef::Macro(it) => it.visibility(db),
+ ModuleDef::BuiltinType(_) => Visibility::Public,
+ }
+ }
+}
+
+impl Module {
+ /// Name of this module.
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let def_map = self.id.def_map(db.upcast());
+ let parent = def_map[self.id.local_id].parent?;
+ def_map[parent].children.iter().find_map(|(name, module_id)| {
+ if *module_id == self.id.local_id {
+ Some(name.clone())
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Returns the crate this module is part of.
+ pub fn krate(self) -> Crate {
+ Crate { id: self.id.krate() }
+ }
+
+ /// Topmost parent of this module. Every module has a `crate_root`, but some
+ /// might be missing `krate`. This can happen if a module's file is not included
+ /// in the module tree of any target in `Cargo.toml`.
+ pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id.krate());
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn is_crate_root(self, db: &dyn HirDatabase) -> bool {
+ let def_map = db.crate_def_map(self.id.krate());
+ def_map.root() == self.id.local_id
+ }
+
+ /// Iterates over all child modules.
+ pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
+ let def_map = self.id.def_map(db.upcast());
+ let children = def_map[self.id.local_id]
+ .children
+ .iter()
+ .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+ .collect::<Vec<_>>();
+ children.into_iter()
+ }
+
+ /// Finds a parent module.
+ pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
+ // FIXME: handle block expressions as modules (their parent is in a different DefMap)
+ let def_map = self.id.def_map(db.upcast());
+ let parent_id = def_map[self.id.local_id].parent?;
+ Some(Module { id: def_map.module_id(parent_id) })
+ }
+
+ pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let mut res = vec![self];
+ let mut curr = self;
+ while let Some(next) = curr.parent(db) {
+ res.push(next);
+ curr = next
+ }
+ res
+ }
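+
+ // Editorial note (not part of upstream): `path_to_root` starts with `self` and ends with the
+ // crate root, so rendering a readable module path means reversing it first. A hedged sketch,
+ // assuming a `db: &dyn HirDatabase` and a `module: Module` are available:
+ //
+ // let segments: Vec<String> = module
+ //     .path_to_root(db)
+ //     .into_iter()
+ //     .rev()
+ //     .filter_map(|m| m.name(db)) // the crate root itself has no name
+ //     .map(|name| name.to_string())
+ //     .collect();
+ // let display_path = segments.join("::");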
+
+ /// Returns a `ModuleScope`: the set of items visible in this module.
+ pub fn scope(
+ self,
+ db: &dyn HirDatabase,
+ visible_from: Option<Module>,
+ ) -> Vec<(Name, ScopeDef)> {
+ self.id.def_map(db.upcast())[self.id.local_id]
+ .scope
+ .entries()
+ .filter_map(|(name, def)| {
+ if let Some(m) = visible_from {
+ let filtered =
+ def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
+ if filtered.is_none() && !def.is_none() {
+ None
+ } else {
+ Some((name, filtered))
+ }
+ } else {
+ Some((name, def))
+ }
+ })
+ .flat_map(|(name, def)| {
+ ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item))
+ })
+ .collect()
+ }
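+
+ // Editorial note (not part of upstream): a single name can map to more than one `ScopeDef`
+ // here, because `ScopeDef::all_items` expands every namespace the entry occupies (types,
+ // values, macros). A hedged usage sketch, assuming `db` and `module` are in scope:
+ //
+ // for (name, def) in module.scope(db, Some(module)) {
+ //     // only entries visible from `module` survive the visibility filter above
+ //     let _ = (name, def);
+ // }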
+
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let _p = profile::span("Module::diagnostics").detail(|| {
+ format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))
+ });
+ let def_map = self.id.def_map(db.upcast());
+ for diag in def_map.diagnostics() {
+ if diag.in_module != self.id.local_id {
+ // FIXME: This is accidentally quadratic.
+ continue;
+ }
+ emit_def_diagnostic(db, acc, diag);
+ }
+ for decl in self.declarations(db) {
+ match decl {
+ ModuleDef::Module(m) => {
+ // Only add diagnostics from inline modules
+ if def_map[m.id.local_id].origin.is_inline() {
+ m.diagnostics(db, acc)
+ }
+ }
+ _ => acc.extend(decl.diagnostics(db)),
+ }
+ }
+
+ for impl_def in self.impl_defs(db) {
+ for item in impl_def.items(db) {
+ let def: DefWithBody = match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::TypeAlias(_) => continue,
+ };
+
+ def.diagnostics(db, acc);
+ }
+ }
+ }
+
+ pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope
+ .declarations()
+ .map(ModuleDef::from)
+ .chain(scope.unnamed_consts().map(|id| ModuleDef::Const(Const::from(id))))
+ .collect()
+ }
+
+ pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
+ }
+
+ pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].scope.impls().map(Impl::from).collect()
+ }
+
+ /// Finds a path that can be used to refer to the given item from within
+ /// this module, if possible.
+ pub fn find_use_path(self, db: &dyn DefDatabase, item: impl Into<ItemInNs>) -> Option<ModPath> {
+ hir_def::find_path::find_path(db, item.into().into(), self.into())
+ }
+
+ /// Finds a path that can be used to refer to the given item from within
+ /// this module, if possible. This is used for returning import paths for use-statements.
+ pub fn find_use_path_prefixed(
+ self,
+ db: &dyn DefDatabase,
+ item: impl Into<ItemInNs>,
+ prefix_kind: PrefixKind,
+ ) -> Option<ModPath> {
+ hir_def::find_path::find_path_prefixed(db, item.into().into(), self.into(), prefix_kind)
+ }
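+
+ // Editorial note (not part of upstream): a hedged sketch of how these lookups might be used
+ // when computing an import, assuming `db`, `module`, and some `item: ModuleDef` are in scope
+ // (`PrefixKind` controls how the returned path is anchored for a `use` statement):
+ //
+ // if let Some(path) = module.find_use_path(db, item) {
+ //     // `path` is a `hir_def::path::ModPath` naming `item` relative to `module`
+ //     let _ = path;
+ // }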
+}
+
+fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
+ match &diag.kind {
+ DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
+ let decl = declaration.to_node(db.upcast());
+ acc.push(
+ UnresolvedModule {
+ decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
+ candidates: candidates.clone(),
+ }
+ .into(),
+ )
+ }
+ DefDiagnosticKind::UnresolvedExternCrate { ast } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedImport { id, index } => {
+ let file_id = id.file_id();
+ let item_tree = id.item_tree(db.upcast());
+ let import = &item_tree[id.value];
+
+ let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
+ acc.push(
+ UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ InactiveCode {
+ node: ast.with_value(AstPtr::new(&item).into()),
+ cfg: cfg.clone(),
+ opts: opts.clone(),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
+ let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db);
+ acc.push(
+ UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
+ let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+ acc.push(
+ UnresolvedMacroCall {
+ macro_call: node,
+ precise_location,
+ path: path.clone(),
+ is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::MacroError { ast, message } => {
+ let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+ acc.push(MacroError { node, precise_location, message: message.clone() }.into());
+ }
+
+ DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
+ let node = ast.to_node(db.upcast());
+ // Must have a name, otherwise we wouldn't emit it.
+ let name = node.name().expect("unimplemented builtin macro with no name");
+ acc.push(
+ UnimplementedBuiltinMacro {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
+ }
+ .into(),
+ );
+ }
+ DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ InvalidDeriveTarget {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ DefDiagnosticKind::MalformedDerive { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ MalformedDerive {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ }
+}
+
+fn precise_macro_call_location(
+ ast: &MacroCallKind,
+ db: &dyn HirDatabase,
+) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
+ // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
+ // - e.g. the full attribute for macro errors, but only the name for name resolution
+ match ast {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ node.path()
+ .and_then(|it| it.segment())
+ .and_then(|it| it.name_ref())
+ .map(|it| it.syntax().text_range()),
+ node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
+ MacroKind::ProcMacro,
+ )
+ }
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+ let node = ast_id.to_node(db.upcast());
+ // Compute the precise location of the macro name's token in the derive
+ // list.
+ let token = (|| {
+ let derive_attr = node
+ .doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(Either::left)?;
+ let token_tree = derive_attr.meta()?.token_tree()?;
+ let group_by = token_tree
+ .syntax()
+ .children_with_tokens()
+ .filter_map(|elem| match elem {
+ syntax::NodeOrToken::Token(tok) => Some(tok),
+ _ => None,
+ })
+ .group_by(|t| t.kind() == T![,]);
+ let (_, mut group) = group_by
+ .into_iter()
+ .filter(|&(comma, _)| !comma)
+ .nth(*derive_index as usize)?;
+ group.find(|t| t.kind() == T![ident])
+ })();
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ token.as_ref().map(|tok| tok.text_range()),
+ token.as_ref().map(ToString::to_string),
+ MacroKind::Derive,
+ )
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ let attr = node
+ .doc_comments_and_attrs()
+ .nth((*invoc_attr_index) as usize)
+ .and_then(Either::left)
+ .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
+
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
+ Some(attr.syntax().text_range()),
+ attr.path()
+ .and_then(|path| path.segment())
+ .and_then(|seg| seg.name_ref())
+ .as_ref()
+ .map(ToString::to_string),
+ MacroKind::Attr,
+ )
+ }
+ }
+}
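+
+ // Editorial worked example (not part of upstream): for a call site like
+ // `#[derive(Clone, PartialEq)]` with `derive_index == 1`, the `Derive` arm above splits the
+ // attribute's token tree on `,`, takes the second non-comma group, and returns the range and
+ // text of the `PartialEq` identifier; if the token cannot be found, both fall back to `None`.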
+
+impl HasVisibility for Module {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let def_map = self.id.def_map(db.upcast());
+ let module_data = &def_map[self.id.local_id];
+ module_data.visibility
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Field {
+ pub(crate) parent: VariantDef,
+ pub(crate) id: LocalFieldId,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum FieldSource {
+ Named(ast::RecordField),
+ Pos(ast::TupleField),
+}
+
+impl Field {
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ self.parent.variant_data(db).fields()[self.id].name.clone()
+ }
+
+ /// Returns the type as in the signature of the struct (i.e., with
+ /// placeholder types for type parameters). Only use this in the context of
+ /// the field definition.
+ pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ let var_id = self.parent.into();
+ let generic_def_id: GenericDefId = match self.parent {
+ VariantDef::Struct(it) => it.id.into(),
+ VariantDef::Union(it) => it.id.into(),
+ VariantDef::Variant(it) => it.parent.id.into(),
+ };
+ let substs = TyBuilder::placeholder_subst(db, generic_def_id);
+ let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
+ Type::new(db, var_id, ty)
+ }
+
+ pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
+ self.parent
+ }
+}
+
+impl HasVisibility for Field {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let variant_data = self.parent.variant_data(db);
+ let visibility = &variant_data.fields()[self.id].visibility;
+ let parent_id: hir_def::VariantId = self.parent.into();
+ visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) id: StructId,
+}
+
+impl Struct {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.struct_data(self.id).name.clone()
+ }
+
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ db.struct_data(self.id)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent: self.into(), id })
+ .collect()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
+ db.struct_data(self.id).repr.clone()
+ }
+
+ pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ self.variant_data(db).kind()
+ }
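+
+ // Editorial note (not part of upstream), assuming the `StructKind::{Record, Tuple, Unit}`
+ // variants from `hir_def`: `struct S { f: u32 }` reports `Record`, `struct S(u32);` reports
+ // `Tuple`, and `struct S;` reports `Unit`.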
+
+ fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ db.struct_data(self.id).variant_data.clone()
+ }
+}
+
+impl HasVisibility for Struct {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.struct_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) id: UnionId,
+}
+
+impl Union {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.union_data(self.id).name.clone()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ db.union_data(self.id)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent: self.into(), id })
+ .collect()
+ }
+
+ fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ db.union_data(self.id).variant_data.clone()
+ }
+}
+
+impl HasVisibility for Union {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.union_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) id: EnumId,
+}
+
+impl Enum {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.enum_data(self.id).name.clone()
+ }
+
+ pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
+ db.enum_data(self.id).variants.iter().map(|(id, _)| Variant { parent: self, id }).collect()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+}
+
+impl HasVisibility for Enum {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.enum_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Variant {
+ pub(crate) parent: Enum,
+ pub(crate) id: LocalEnumVariantId,
+}
+
+impl Variant {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.parent.module(db)
+ }
+
+ pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum {
+ self.parent
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.enum_data(self.parent.id).variants[self.id].name.clone()
+ }
+
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ self.variant_data(db)
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent: self.into(), id })
+ .collect()
+ }
+
+ pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ self.variant_data(db).kind()
+ }
+
+ pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
+ }
+}
+
+/// Variants inherit visibility from the parent enum.
+impl HasVisibility for Variant {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ self.parent_enum(db).visibility(db)
+ }
+}
+
+/// A data type (struct, union, or enum).
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Struct(Struct),
+ Union(Union),
+ Enum(Enum),
+}
+impl_from!(Struct, Union, Enum for Adt);
+
+impl Adt {
+ pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+ let subst = db.generic_defaults(self.into());
+ subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+ GenericArgData::Ty(x) => x.is_unknown(),
+ _ => false,
+ })
+ }
+
+ /// Turns this ADT into a type. Any type parameters of the ADT will be
+ /// turned into unknown types, which is good for e.g. finding the most
+ /// general set of completions, but will not look very nice when printed.
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let id = AdtId::from(self);
+ Type::from_def(db, id)
+ }
+
+ /// Turns this ADT into a type with the given type parameters. This isn't
+ /// the greatest API, FIXME find a better one.
+ pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
+ let id = AdtId::from(self);
+ let mut it = args.iter().map(|t| t.ty.clone());
+ let ty = TyBuilder::def_ty(db, id.into())
+ .fill(|x| {
+ let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
+ match x {
+ ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ Type::new(db, id, ty)
+ }
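+
+ // Editorial sketch (not part of upstream): missing arguments are padded with error types and
+ // const parameters become unknown consts, so partially-known instantiations still build.
+ // Assuming a hypothetical `wrapper: Adt` for `struct Wrapper<T>(T)` and a `u32_ty: Type`:
+ //
+ // let wrapper_of_u32 = wrapper.ty_with_args(db, &[u32_ty]); // roughly `Wrapper<u32>`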
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ Adt::Struct(s) => s.module(db),
+ Adt::Union(s) => s.module(db),
+ Adt::Enum(e) => e.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self {
+ Adt::Struct(s) => s.name(db),
+ Adt::Union(u) => u.name(db),
+ Adt::Enum(e) => e.name(db),
+ }
+ }
+
+ pub fn as_enum(&self) -> Option<Enum> {
+ if let Self::Enum(v) = self {
+ Some(*v)
+ } else {
+ None
+ }
+ }
+}
+
+impl HasVisibility for Adt {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match self {
+ Adt::Struct(it) => it.visibility(db),
+ Adt::Union(it) => it.visibility(db),
+ Adt::Enum(it) => it.visibility(db),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum VariantDef {
+ Struct(Struct),
+ Union(Union),
+ Variant(Variant),
+}
+impl_from!(Struct, Union, Variant for VariantDef);
+
+impl VariantDef {
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ match self {
+ VariantDef::Struct(it) => it.fields(db),
+ VariantDef::Union(it) => it.fields(db),
+ VariantDef::Variant(it) => it.fields(db),
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ VariantDef::Struct(it) => it.module(db),
+ VariantDef::Union(it) => it.module(db),
+ VariantDef::Variant(it) => it.module(db),
+ }
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ match self {
+ VariantDef::Struct(s) => s.name(db),
+ VariantDef::Union(u) => u.name(db),
+ VariantDef::Variant(e) => e.name(db),
+ }
+ }
+
+ pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ match self {
+ VariantDef::Struct(it) => it.variant_data(db),
+ VariantDef::Union(it) => it.variant_data(db),
+ VariantDef::Variant(it) => it.variant_data(db),
+ }
+ }
+}
+
+/// The defs which have a body.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBody {
+ Function(Function),
+ Static(Static),
+ Const(Const),
+}
+impl_from!(Function, Const, Static for DefWithBody);
+
+impl DefWithBody {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ DefWithBody::Const(c) => c.module(db),
+ DefWithBody::Function(f) => f.module(db),
+ DefWithBody::Static(s) => s.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ match self {
+ DefWithBody::Function(f) => Some(f.name(db)),
+ DefWithBody::Static(s) => Some(s.name(db)),
+ DefWithBody::Const(c) => c.name(db),
+ }
+ }
+
+ /// Returns the type this def's body has to evaluate to.
+ pub fn body_type(self, db: &dyn HirDatabase) -> Type {
+ match self {
+ DefWithBody::Function(it) => it.ret_type(db),
+ DefWithBody::Static(it) => it.ty(db),
+ DefWithBody::Const(it) => it.ty(db),
+ }
+ }
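+
+ // Editorial note (not part of upstream): for `const N: usize = 1 + 1;` this is `usize`, for a
+ // `static` it is the declared type, and for a function it is the return type (`()` if omitted).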
+
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let krate = self.module(db).id.krate();
+
+ let (body, source_map) = db.body_with_source_map(self.into());
+
+ for (_, def_map) in body.blocks(db.upcast()) {
+ for diag in def_map.diagnostics() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+
+ for diag in source_map.diagnostics() {
+ match diag {
+ BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
+ InactiveCode { node: node.clone(), cfg: cfg.clone(), opts: opts.clone() }
+ .into(),
+ ),
+ BodyDiagnostic::MacroError { node, message } => acc.push(
+ MacroError {
+ node: node.clone().map(|it| it.into()),
+ precise_location: None,
+ message: message.to_string(),
+ }
+ .into(),
+ ),
+ BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
+ UnresolvedProcMacro {
+ node: node.clone().map(|it| it.into()),
+ precise_location: None,
+ macro_name: None,
+ kind: MacroKind::ProcMacro,
+ krate: *krate,
+ }
+ .into(),
+ ),
+ BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
+ UnresolvedMacroCall {
+ macro_call: node.clone().map(|ast_ptr| ast_ptr.into()),
+ precise_location: None,
+ path: path.clone(),
+ is_bang: true,
+ }
+ .into(),
+ ),
+ }
+ }
+
+ let infer = db.infer(self.into());
+ let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
+ for d in &infer.diagnostics {
+ match d {
+ hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
+ let field = source_map.field_syntax(*expr);
+ acc.push(NoSuchField { field }.into())
+ }
+ hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
+ let expr = source_map
+ .expr_syntax(*expr)
+ .expect("break outside of loop in synthetic syntax");
+ acc.push(BreakOutsideOfLoop { expr }.into())
+ }
+ hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+ match source_map.expr_syntax(*call_expr) {
+ Ok(source_ptr) => acc.push(
+ MismatchedArgCount {
+ call_expr: source_ptr,
+ expected: *expected,
+ found: *found,
+ }
+ .into(),
+ ),
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ }
+ for (expr, mismatch) in infer.expr_type_mismatches() {
+ let expr = match source_map.expr_syntax(expr) {
+ Ok(expr) => expr,
+ Err(SyntheticSyntax) => continue,
+ };
+ acc.push(
+ TypeMismatch {
+ expr,
+ expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
+ actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
+ }
+ .into(),
+ );
+ }
+
+ for expr in hir_ty::diagnostics::missing_unsafe(db, self.into()) {
+ match source_map.expr_syntax(expr) {
+ Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
+ Err(SyntheticSyntax) => {
+ // FIXME: Here and elsewhere in this file, the `expr` was
+ // desugared; report or assert that this doesn't happen.
+ }
+ }
+ }
+
+ for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
+ match diagnostic {
+ BodyValidationDiagnostic::RecordMissingFields {
+ record,
+ variant,
+ missed_fields,
+ } => {
+ let variant_data = variant.variant_data(db.upcast());
+ let missed_fields = missed_fields
+ .into_iter()
+ .map(|idx| variant_data.fields()[idx].name.clone())
+ .collect();
+
+ match record {
+ Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::RecordExpr(record_expr) =
+ &source_ptr.value.to_node(&root)
+ {
+ if record_expr.record_expr_field_list().is_some() {
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: Either::Left(AstPtr::new(
+ record_expr,
+ )),
+ field_list_parent_path: record_expr
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ }
+ .into(),
+ )
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
+ Ok(source_ptr) => {
+ if let Some(expr) = source_ptr.value.as_ref().left() {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
+ if record_pat.record_pat_field_list().is_some() {
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: Either::Right(AstPtr::new(
+ &record_pat,
+ )),
+ field_list_parent_path: record_pat
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ }
+ .into(),
+ )
+ }
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ }
+ }
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
+ if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
+ acc.push(
+ ReplaceFilterMapNextWithFindMap {
+ file: next_source_ptr.file_id,
+ next_expr: next_source_ptr.value,
+ }
+ .into(),
+ );
+ }
+ }
+ BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
+ match source_map.expr_syntax(match_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::MatchExpr(match_expr) =
+ &source_ptr.value.to_node(&root)
+ {
+ if let Some(match_expr) = match_expr.expr() {
+ acc.push(
+ MissingMatchArms {
+ file: source_ptr.file_id,
+ match_expr: AstPtr::new(&match_expr),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ }
+
+ let def: ModuleDef = match self {
+ DefWithBody::Function(it) => it.into(),
+ DefWithBody::Static(it) => it.into(),
+ DefWithBody::Const(it) => it.into(),
+ };
+ for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
+ acc.push(diag.into())
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Function {
+ pub(crate) id: FunctionId,
+}
+
+impl Function {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.lookup(db.upcast()).module(db.upcast()).into()
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.function_data(self.id).name.clone()
+ }
+
+ /// Get this function's return type
+ pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ty = callable_sig.ret().clone();
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
+ if !self.is_async(db) {
+ return None;
+ }
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ret_ty = callable_sig.ret().clone();
+ for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
+ if let WhereClause::AliasEq(output_eq) = pred.into_value_and_skipped_binders().0 {
+ return Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into();
+ }
+ }
+ never!("Async fn ret_type should be impl Future");
+ None
+ }
+
+ pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_self_param()
+ }
+
+ pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+ self.has_self_param(db).then(|| SelfParam { func: self.id })
+ }
+
+ pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
+ pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
+ if self.self_param(db).is_none() {
+ return None;
+ }
+ Some(self.params_without_self(db))
+ }
+
+ pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .skip(skip)
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
+ pub fn is_const(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_const_kw()
+ }
+
+ pub fn is_async(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_async_kw()
+ }
+
+ pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
+ hir_ty::is_fn_unsafe_to_call(db, self.id)
+ }
+
+ /// Whether this function declaration has a definition.
+ ///
+ /// This is false in the case of required (not provided) trait methods.
+ pub fn has_body(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_body()
+ }
+
+ pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
+ let function_data = db.function_data(self.id);
+ let attrs = &function_data.attrs;
+ // FIXME: Store this in FunctionData flags?
+ if !(attrs.is_proc_macro()
+ || attrs.is_proc_macro_attribute()
+ || attrs.is_proc_macro_derive())
+ {
+ return None;
+ }
+ let loc = self.id.lookup(db.upcast());
+ let def_map = db.crate_def_map(loc.krate(db).into());
+ def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
+ }
+
+ /// A textual representation of the HIR of this function for debugging purposes.
+ pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
+ let body = db.body(self.id.into());
+
+ let mut result = String::new();
+ format_to!(result, "HIR expressions in the body of `{}`:\n", self.name(db));
+ for (id, expr) in body.exprs.iter() {
+ format_to!(result, "{:?}: {:?}\n", id, expr);
+ }
+
+ result
+ }
+}
+
+// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum Access {
+ Shared,
+ Exclusive,
+ Owned,
+}
+
+impl From<hir_ty::Mutability> for Access {
+ fn from(mutability: hir_ty::Mutability) -> Access {
+ match mutability {
+ hir_ty::Mutability::Not => Access::Shared,
+ hir_ty::Mutability::Mut => Access::Exclusive,
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct Param {
+ func: Function,
+ /// The index in the parameter list, including the self parameter.
+ idx: usize,
+ ty: Type,
+}
+
+impl Param {
+ pub fn ty(&self) -> &Type {
+ &self.ty
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+ db.function_data(self.func.id).params[self.idx].0.clone()
+ }
+
+ pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
+ let parent = DefWithBodyId::FunctionId(self.func.into());
+ let body = db.body(parent);
+ let pat_id = body.params[self.idx];
+ if let Pat::Bind { .. } = &body[pat_id] {
+ Some(Local { parent, pat_id: body.params[self.idx] })
+ } else {
+ None
+ }
+ }
+
+ pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
+ self.source(db).and_then(|p| p.value.pat())
+ }
+
+ pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> {
+ let InFile { file_id, value } = self.func.source(db)?;
+ let params = value.param_list()?;
+ if params.self_param().is_some() {
+ params.params().nth(self.idx.checked_sub(1)?)
+ } else {
+ params.params().nth(self.idx)
+ }
+ .map(|value| InFile { file_id, value })
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+ func: FunctionId,
+}
+
+impl SelfParam {
+ pub fn access(self, db: &dyn HirDatabase) -> Access {
+ let func_data = db.function_data(self.func);
+ func_data
+ .params
+ .first()
+ .map(|(_, param)| match &**param {
+ TypeRef::Reference(.., mutability) => match mutability {
+ hir_def::type_ref::Mutability::Shared => Access::Shared,
+ hir_def::type_ref::Mutability::Mut => Access::Exclusive,
+ },
+ _ => Access::Owned,
+ })
+ .unwrap_or(Access::Owned)
+ }
+
+ pub fn display(self, db: &dyn HirDatabase) -> &'static str {
+ match self.access(db) {
+ Access::Shared => "&self",
+ Access::Exclusive => "&mut self",
+ Access::Owned => "self",
+ }
+ }
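+
+ // Editorial note (not part of upstream): `fn f(&self)` displays as "&self",
+ // `fn f(&mut self)` as "&mut self", and any other receiver (e.g. `self` by value) as "self".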
+
+ pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
+ let InFile { file_id, value } = Function::from(self.func).source(db)?;
+ value
+ .param_list()
+ .and_then(|params| params.self_param())
+ .map(|value| InFile { file_id, value })
+ }
+
+ pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ let substs = TyBuilder::placeholder_subst(db, self.func);
+ let callable_sig =
+ db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
+ let environment = db.trait_environment(self.func.into());
+ let ty = callable_sig.params()[0].clone();
+ Type { env: environment, ty }
+ }
+}
+
+impl HasVisibility for Function {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.function_visibility(self.id)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Const {
+ pub(crate) id: ConstId,
+}
+
+impl Const {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ db.const_data(self.id).name.clone()
+ }
+
+ pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+ self.source(db)?.value.body()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let data = db.const_data(self.id);
+ let resolver = self.id.resolver(db.upcast());
+ let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+ let ty = ctx.lower_ty(&data.type_ref);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
+ db.const_eval(self.id)
+ }
+}
+
+impl HasVisibility for Const {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.const_visibility(self.id)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Static {
+ pub(crate) id: StaticId,
+}
+
+impl Static {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.static_data(self.id).name.clone()
+ }
+
+ pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+ db.static_data(self.id).mutable
+ }
+
+ pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+ self.source(db)?.value.body()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let data = db.static_data(self.id);
+ let resolver = self.id.resolver(db.upcast());
+ let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+ let ty = ctx.lower_ty(&data.type_ref);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+}
+
+impl HasVisibility for Static {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.static_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Trait {
+ pub(crate) id: TraitId,
+}
+
+impl Trait {
+ pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
+ db.lang_item(krate.into(), name.to_smol_str())
+ .and_then(LangItemTarget::as_trait)
+ .map(Into::into)
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.trait_data(self.id).name.clone()
+ }
+
+ pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+ }
+
+ pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ let traits = all_super_traits(db.upcast(), self.into());
+ traits.iter().flat_map(|tr| Trait::from(*tr).items(db)).collect()
+ }
+
+ pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
+ db.trait_data(self.id).is_auto
+ }
+
+ pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool {
+ db.trait_data(self.id).is_unsafe
+ }
+
+ pub fn type_or_const_param_count(
+ &self,
+ db: &dyn HirDatabase,
+ count_required_only: bool,
+ ) -> usize {
+ db.generic_params(GenericDefId::from(self.id))
+ .type_or_consts
+ .iter()
+ .filter(|(_, ty)| match ty {
+ TypeOrConstParamData::TypeParamData(ty)
+ if ty.provenance != TypeParamProvenance::TypeParamList =>
+ {
+ false
+ }
+ _ => true,
+ })
+ .filter(|(_, ty)| !count_required_only || !ty.has_default())
+ .count()
+ }
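+
+ // Editorial worked example (not part of upstream): for `trait Frobnicate<T, U = ()>` the
+ // implicit `Self` parameter is filtered out (its provenance is not `TypeParamList`), so the
+ // count is 2; with `count_required_only == true` the defaulted `U` is dropped as well,
+ // giving 1.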
+}
+
+impl HasVisibility for Trait {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.trait_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+ pub(crate) id: TypeAliasId,
+}
+
+impl TypeAlias {
+ pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+ let subst = db.generic_defaults(self.id.into());
+ subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+ GenericArgData::Ty(x) => x.is_unknown(),
+ _ => false,
+ })
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+ }
+
+ pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> {
+ db.type_alias_data(self.id).type_ref.as_deref().cloned()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.type_alias_data(self.id).name.clone()
+ }
+}
+
+impl HasVisibility for TypeAlias {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let function_data = db.type_alias_data(self.id);
+ let visibility = &function_data.visibility;
+ visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BuiltinType {
+ pub(crate) inner: hir_def::builtin_type::BuiltinType,
+}
+
+impl BuiltinType {
+ pub fn str() -> BuiltinType {
+ BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner))
+ }
+
+ pub fn name(self) -> Name {
+ self.inner.as_name()
+ }
+
+ pub fn is_int(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Int(_))
+ }
+
+ pub fn is_uint(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Uint(_))
+ }
+
+ pub fn is_float(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_))
+ }
+
+ pub fn is_char(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Char)
+ }
+
+ pub fn is_bool(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Bool)
+ }
+
+ pub fn is_str(&self) -> bool {
+ matches!(self.inner, hir_def::builtin_type::BuiltinType::Str)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroKind {
+ /// `macro_rules!` or Macros 2.0 macro.
+ Declarative,
+ /// A built-in or custom derive.
+ Derive,
+ /// A built-in function-like macro.
+ BuiltIn,
+ /// A procedural attribute macro.
+ Attr,
+ /// A function-like procedural macro.
+ ProcMacro,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Macro {
+ pub(crate) id: MacroId,
+}
+
+impl Macro {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.module(db.upcast()) }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self.id {
+ MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(),
+ MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(),
+ MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(),
+ }
+ }
+
+ pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
+ matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
+ }
+
+ pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
+ match self.id {
+ MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
+ MacroExpander::Declarative => MacroKind::Declarative,
+ MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+ MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+ MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+ },
+ MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
+ MacroExpander::Declarative => MacroKind::Declarative,
+ MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+ MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+ MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+ },
+ MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
+ ProcMacroKind::CustomDerive => MacroKind::Derive,
+ ProcMacroKind::FuncLike => MacroKind::ProcMacro,
+ ProcMacroKind::Attr => MacroKind::Attr,
+ },
+ }
+ }
+
+ pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
+ match self.kind(db) {
+ MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
+ MacroKind::Attr | MacroKind::Derive => false,
+ }
+ }
+
+ pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
+ match self.id {
+ MacroId::Macro2Id(it) => {
+ matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+ }
+ MacroId::MacroRulesId(it) => {
+ matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+ }
+ MacroId::ProcMacroId(_) => false,
+ }
+ }
+
+ pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
+ matches!(self.kind(db), MacroKind::Attr)
+ }
+
+ pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
+ matches!(self.kind(db), MacroKind::Derive)
+ }
+}
+
+impl HasVisibility for Macro {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match self.id {
+ MacroId::Macro2Id(id) => {
+ let data = db.macro2_data(id);
+ let visibility = &data.visibility;
+ visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+ MacroId::MacroRulesId(_) => Visibility::Public,
+ MacroId::ProcMacroId(_) => Visibility::Public,
+ }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum ItemInNs {
+ Types(ModuleDef),
+ Values(ModuleDef),
+ Macros(Macro),
+}
+
+impl From<Macro> for ItemInNs {
+ fn from(it: Macro) -> Self {
+ Self::Macros(it)
+ }
+}
+
+impl From<ModuleDef> for ItemInNs {
+ fn from(module_def: ModuleDef) -> Self {
+ match module_def {
+ ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
+ ItemInNs::Values(module_def)
+ }
+ _ => ItemInNs::Types(module_def),
+ }
+ }
+}
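+
+ // Editorial note (not part of upstream): functions, consts, and statics land in the value
+ // namespace; every other `ModuleDef` is put in the type namespace by this conversion (the
+ // separate `Macros` variant is only produced via `From<Macro>` above).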
+
+impl ItemInNs {
+ pub fn as_module_def(self) -> Option<ModuleDef> {
+ match self {
+ ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
+ ItemInNs::Macros(_) => None,
+ }
+ }
+
+ /// Returns the crate defining this item (or `None` if `self` is built-in).
+ pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+ match self {
+ ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate()),
+ ItemInNs::Macros(id) => Some(id.module(db).krate()),
+ }
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ match self {
+ ItemInNs::Types(it) | ItemInNs::Values(it) => it.attrs(db),
+ ItemInNs::Macros(it) => Some(it.attrs(db)),
+ }
+ }
+}
+
+/// Invariant: `inner.as_assoc_item(db).is_some()`
+/// We do not actively enforce this invariant.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ Function(Function),
+ Const(Const),
+ TypeAlias(TypeAlias),
+}
+#[derive(Debug)]
+pub enum AssocItemContainer {
+ Trait(Trait),
+ Impl(Impl),
+}
+pub trait AsAssocItem {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem>;
+}
+
+impl AsAssocItem for Function {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ as_assoc_item(db, AssocItem::Function, self.id)
+ }
+}
+impl AsAssocItem for Const {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ as_assoc_item(db, AssocItem::Const, self.id)
+ }
+}
+impl AsAssocItem for TypeAlias {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ as_assoc_item(db, AssocItem::TypeAlias, self.id)
+ }
+}
+impl AsAssocItem for ModuleDef {
+ fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+ match self {
+ ModuleDef::Function(it) => it.as_assoc_item(db),
+ ModuleDef::Const(it) => it.as_assoc_item(db),
+ ModuleDef::TypeAlias(it) => it.as_assoc_item(db),
+ _ => None,
+ }
+ }
+}
+fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
+where
+ ID: Lookup<Data = AssocItemLoc<AST>>,
+ DEF: From<ID>,
+ CTOR: FnOnce(DEF) -> AssocItem,
+ AST: ItemTreeNode,
+{
+ match id.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+}
+
+impl AssocItem {
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ match self {
+ AssocItem::Function(it) => Some(it.name(db)),
+ AssocItem::Const(it) => it.name(db),
+ AssocItem::TypeAlias(it) => Some(it.name(db)),
+ }
+ }
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ AssocItem::Function(f) => f.module(db),
+ AssocItem::Const(c) => c.module(db),
+ AssocItem::TypeAlias(t) => t.module(db),
+ }
+ }
+ pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
+ let container = match self {
+ AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
+ AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
+ AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
+ };
+ match container {
+ ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
+ ItemContainerId::ImplId(id) => AssocItemContainer::Impl(id.into()),
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+ panic!("invalid AssocItem")
+ }
+ }
+ }
+
+ pub fn containing_trait(self, db: &dyn HirDatabase) -> Option<Trait> {
+ match self.container(db) {
+ AssocItemContainer::Trait(t) => Some(t),
+ _ => None,
+ }
+ }
+
+ pub fn containing_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+ match self.container(db) {
+ AssocItemContainer::Impl(i) => i.trait_(db),
+ _ => None,
+ }
+ }
+
+ pub fn containing_trait_or_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+ match self.container(db) {
+ AssocItemContainer::Trait(t) => Some(t),
+ AssocItemContainer::Impl(i) => i.trait_(db),
+ }
+ }
+}
+
+impl HasVisibility for AssocItem {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match self {
+ AssocItem::Function(f) => f.visibility(db),
+ AssocItem::Const(c) => c.visibility(db),
+ AssocItem::TypeAlias(t) => t.visibility(db),
+ }
+ }
+}
+
+impl From<AssocItem> for ModuleDef {
+ fn from(assoc: AssocItem) -> Self {
+ match assoc {
+ AssocItem::Function(it) => ModuleDef::Function(it),
+ AssocItem::Const(it) => ModuleDef::Const(it),
+ AssocItem::TypeAlias(it) => ModuleDef::TypeAlias(it),
+ }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum GenericDef {
+ Function(Function),
+ Adt(Adt),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ Impl(Impl),
+ // enum variants cannot have generics themselves, but their parent enums
+ // can, and this makes some code easier to write
+ Variant(Variant),
+ // consts can have type parameters from their parents (i.e. associated consts of traits)
+ Const(Const),
+}
+impl_from!(
+ Function,
+ Adt(Struct, Enum, Union),
+ Trait,
+ TypeAlias,
+ Impl,
+ Variant,
+ Const
+ for GenericDef
+);
+
+impl GenericDef {
+ pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
+ let generics = db.generic_params(self.into());
+ let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
+ let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
+ match toc.split(db) {
+ Either::Left(x) => GenericParam::ConstParam(x),
+ Either::Right(x) => GenericParam::TypeParam(x),
+ }
+ });
+ let lt_params = generics
+ .lifetimes
+ .iter()
+ .map(|(local_id, _)| LifetimeParam {
+ id: LifetimeParamId { parent: self.into(), local_id },
+ })
+ .map(GenericParam::LifetimeParam);
+ lt_params.chain(ty_params).collect()
+ }
+
+ pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
+ let generics = db.generic_params(self.into());
+ generics
+ .type_or_consts
+ .iter()
+ .map(|(local_id, _)| TypeOrConstParam {
+ id: TypeOrConstParamId { parent: self.into(), local_id },
+ })
+ .collect()
+ }
+}
+
+/// A single local definition.
+///
+/// If this definition is part of a "MultiLocal" (a local that has multiple declarations due to
+/// or-patterns), then this references only a single one of those declarations.
+/// To retrieve the other locals, use [`Local::associated_locals`].
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Local {
+ pub(crate) parent: DefWithBodyId,
+ pub(crate) pat_id: PatId,
+}
+
+impl Local {
+ pub fn is_param(self, db: &dyn HirDatabase) -> bool {
+ let src = self.source(db);
+ match src.value {
+ Either::Left(pat) => pat
+ .syntax()
+ .ancestors()
+ .map(|it| it.kind())
+ .take_while(|&kind| ast::Pat::can_cast(kind) || ast::Param::can_cast(kind))
+ .any(ast::Param::can_cast),
+ Either::Right(_) => true,
+ }
+ }
+
+ pub fn as_self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+ match self.parent {
+ DefWithBodyId::FunctionId(func) if self.is_self(db) => Some(SelfParam { func }),
+ _ => None,
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let body = db.body(self.parent);
+ match &body[self.pat_id] {
+ Pat::Bind { name, .. } => name.clone(),
+ _ => {
+ stdx::never!("hir::Local is missing a name!");
+ Name::missing()
+ }
+ }
+ }
+
+ pub fn is_self(self, db: &dyn HirDatabase) -> bool {
+ self.name(db) == name![self]
+ }
+
+ pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+ let body = db.body(self.parent);
+ matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
+ }
+
+ pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
+ let body = db.body(self.parent);
+ matches!(
+ &body[self.pat_id],
+ Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
+ )
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+ self.parent.into()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.parent(db).module(db)
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let def = self.parent;
+ let infer = db.infer(def);
+ let ty = infer[self.pat_id].clone();
+ Type::new(db, def, ty)
+ }
+
+ pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
+ let body = db.body(self.parent);
+ body.ident_patterns_for(&self.pat_id)
+ .iter()
+ .map(|&pat_id| Local { parent: self.parent, pat_id })
+ .collect()
+ }
+
+ /// If this local is part of a multi-local, retrieve the representative local.
+ /// That is, the local that references resolve to.
+ pub fn representative(self, db: &dyn HirDatabase) -> Local {
+ let body = db.body(self.parent);
+ Local { pat_id: body.pattern_representative(self.pat_id), ..self }
+ }
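+
+ // Editorial note (not part of upstream): in `match r { Ok(v) | Err(v) => () }` each `v` is a
+ // distinct `Local` for the same binding; `associated_locals` returns the whole group and
+ // `representative` picks the one that name resolution treats as canonical.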
+
+ pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
+ let (_body, source_map) = db.body_with_source_map(self.parent);
+ let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
+ let root = src.file_syntax(db.upcast());
+ src.map(|ast| match ast {
+ // Suspicious unwrap
+ Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
+ Either::Right(it) => Either::Right(it.to_node(&root)),
+ })
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct DeriveHelper {
+ pub(crate) derive: MacroId,
+ pub(crate) idx: usize,
+}
+
+impl DeriveHelper {
+ pub fn derive(&self) -> Macro {
+ Macro { id: self.derive.into() }
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ match self.derive {
+ MacroId::Macro2Id(_) => None,
+ MacroId::MacroRulesId(_) => None,
+ MacroId::ProcMacroId(proc_macro) => db
+ .proc_macro_data(proc_macro)
+ .helpers
+ .as_ref()
+ .and_then(|it| it.get(self.idx))
+ .cloned(),
+ }
+ .unwrap_or_else(|| Name::missing())
+ }
+}
+
+// FIXME: Wrong name? This could also be a registered attribute
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct BuiltinAttr {
+ krate: Option<CrateId>,
+ idx: usize,
+}
+
+impl BuiltinAttr {
+ // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
+ pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+ if let builtin @ Some(_) = Self::builtin(name) {
+ return builtin;
+ }
+ let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?;
+ Some(BuiltinAttr { krate: Some(krate.id), idx })
+ }
+
+ fn builtin(name: &str) -> Option<Self> {
+ hir_def::builtin_attr::INERT_ATTRIBUTES
+ .iter()
+ .position(|tool| tool.name == name)
+ .map(|idx| BuiltinAttr { krate: None, idx })
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+ // FIXME: Return a `Name` here
+ match self.krate {
+ Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(),
+ None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name),
+ }
+ }
+
+ pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
+ match self.krate {
+ Some(_) => None,
+ None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ToolModule {
+ krate: Option<CrateId>,
+ idx: usize,
+}
+
+impl ToolModule {
+ // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
+ pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+ if let builtin @ Some(_) = Self::builtin(name) {
+ return builtin;
+ }
+ let idx = db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)?;
+ Some(ToolModule { krate: Some(krate.id), idx })
+ }
+
+ fn builtin(name: &str) -> Option<Self> {
+ hir_def::builtin_attr::TOOL_MODULES
+ .iter()
+ .position(|&tool| tool == name)
+ .map(|idx| ToolModule { krate: None, idx })
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+ // FIXME: Return a `Name` here
+ match self.krate {
+ Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx].clone(),
+ None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx]),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Label {
+ pub(crate) parent: DefWithBodyId,
+ pub(crate) label_id: LabelId,
+}
+
+impl Label {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.parent(db).module(db)
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+ self.parent.into()
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let body = db.body(self.parent);
+ body[self.label_id].name.clone()
+ }
+
+ pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
+ let (_body, source_map) = db.body_with_source_map(self.parent);
+ let src = source_map.label_syntax(self.label_id);
+ let root = src.file_syntax(db.upcast());
+ src.map(|ast| ast.to_node(&root))
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+ TypeParam(TypeParam),
+ ConstParam(ConstParam),
+ LifetimeParam(LifetimeParam),
+}
+impl_from!(TypeParam, ConstParam, LifetimeParam for GenericParam);
+
+impl GenericParam {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ GenericParam::TypeParam(it) => it.module(db),
+ GenericParam::ConstParam(it) => it.module(db),
+ GenericParam::LifetimeParam(it) => it.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self {
+ GenericParam::TypeParam(it) => it.name(db),
+ GenericParam::ConstParam(it) => it.name(db),
+ GenericParam::LifetimeParam(it) => it.name(db),
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+ pub(crate) id: TypeParamId,
+}
+
+impl TypeParam {
+ pub fn merge(self) -> TypeOrConstParam {
+ TypeOrConstParam { id: self.id.into() }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ self.merge().name(db)
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent().module(db.upcast()).into()
+ }
+
+ /// Is this type parameter implicitly introduced (e.g. `Self` in a trait or an `impl Trait`
+ /// argument)?
+ pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
+ let params = db.generic_params(self.id.parent());
+ let data = &params.type_or_consts[self.id.local_id()];
+ match data.type_param().unwrap().provenance {
+ hir_def::generics::TypeParamProvenance::TypeParamList => false,
+ hir_def::generics::TypeParamProvenance::TraitSelf
+ | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true,
+ }
+ }
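+
+ // Editorial note (not part of upstream): `T` in `fn f<T>(x: T)` is written in the parameter
+ // list and is therefore not implicit, while the `Self` parameter of a trait and the anonymous
+ // parameter introduced by an `impl Trait` argument position are.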
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.parent().resolver(db.upcast());
+ let ty =
+ TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ /// FIXME: this only lists trait bounds from the item defining the type
+ /// parameter, not additional bounds that might be added e.g. by a method if
+ /// the parameter comes from an impl!
+ pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
+ db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
+ .iter()
+ .filter_map(|pred| match &pred.skip_binders().skip_binders() {
+ hir_ty::WhereClause::Implemented(trait_ref) => {
+ Some(Trait::from(trait_ref.hir_trait_id()))
+ }
+ _ => None,
+ })
+ .collect()
+ }
+
+ pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
+ let params = db.generic_defaults(self.id.parent());
+ let local_idx = hir_ty::param_idx(db, self.id.into())?;
+ let resolver = self.id.parent().resolver(db.upcast());
+ let ty = params.get(local_idx)?.clone();
+ let subst = TyBuilder::placeholder_subst(db, self.id.parent());
+ let ty = ty.substitute(Interner, &subst_prefix(&subst, local_idx));
+ match ty.data(Interner) {
+ GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+ pub(crate) id: LifetimeParamId,
+}
+
+impl LifetimeParam {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent);
+ params.lifetimes[self.id.local_id].name.clone()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent.module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent.into()
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+ pub(crate) id: ConstParamId,
+}
+
+impl ConstParam {
+ pub fn merge(self) -> TypeOrConstParam {
+ TypeOrConstParam { id: self.id.into() }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent());
+ match params.type_or_consts[self.id.local_id()].name() {
+ Some(x) => x.clone(),
+ None => {
+ never!();
+ Name::missing()
+ }
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent().module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent().into()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::new(db, self.id.parent(), db.const_param_ty(self.id))
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeOrConstParam {
+ pub(crate) id: TypeOrConstParamId,
+}
+
+impl TypeOrConstParam {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent);
+ match params.type_or_consts[self.id.local_id].name() {
+ Some(n) => n.clone(),
+ _ => Name::missing(),
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent.module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent.into()
+ }
+
+ pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
+ let params = db.generic_params(self.id.parent);
+ match &params.type_or_consts[self.id.local_id] {
+ hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+ Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
+ }
+ hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+ Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) })
+ }
+ }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ match self.split(db) {
+ Either::Left(x) => x.ty(db),
+ Either::Right(x) => x.ty(db),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) id: ImplId,
+}
+
+impl Impl {
+ pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
+ let inherent = db.inherent_impls_in_crate(krate.id);
+ let trait_ = db.trait_impls_in_crate(krate.id);
+
+ inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
+ }
+
+ pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
+ let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
+ Some(def_crates) => def_crates,
+ None => return Vec::new(),
+ };
+
+ let filter = |impl_def: &Impl| {
+ let self_ty = impl_def.self_ty(db);
+ let rref = self_ty.remove_ref();
+ ty.equals_ctor(rref.as_ref().map_or(&self_ty.ty, |it| &it.ty))
+ };
+
+ let fp = TyFingerprint::for_inherent_impl(&ty);
+ let fp = match fp {
+ Some(fp) => fp,
+ None => return Vec::new(),
+ };
+
+ let mut all = Vec::new();
+ def_crates.iter().for_each(|&id| {
+ all.extend(
+ db.inherent_impls_in_crate(id)
+ .for_self_ty(&ty)
+ .iter()
+ .cloned()
+ .map(Self::from)
+ .filter(filter),
+ )
+ });
+ for id in def_crates
+ .iter()
+ .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
+ .map(|Crate { id }| id)
+ .chain(def_crates.iter().copied())
+ .unique()
+ {
+ all.extend(
+ db.trait_impls_in_crate(id)
+ .for_self_ty_without_blanket_impls(fp)
+ .map(Self::from)
+ .filter(filter),
+ );
+ }
+ all
+ }
+
+ pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
+ let krate = trait_.module(db).krate();
+ let mut all = Vec::new();
+ for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+ let impls = db.trait_impls_in_crate(id);
+ all.extend(impls.for_trait(trait_.id).map(Self::from))
+ }
+ all
+ }
+
+ // FIXME: the return type is wrong. This should be a hir version of
+ // `TraitRef` (to account for parameters and qualifiers)
+ pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
+ let trait_ref = db.impl_trait(self.id)?.skip_binders().clone();
+ let id = hir_ty::from_chalk_trait_id(trait_ref.trait_id);
+ Some(Trait { id })
+ }
+
+ pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
+ }
+
+ pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
+ db.impl_data(self.id).is_negative
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.lookup(db.upcast()).container.into()
+ }
+
+ pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ let src = self.source(db)?;
+ src.file_id.is_builtin_derive(db.upcast())
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct Type {
+ env: Arc<TraitEnvironment>,
+ ty: Ty,
+}
+
+impl Type {
+ pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
+ Type::new_with_resolver_inner(db, resolver, ty)
+ }
+
+ pub(crate) fn new_with_resolver_inner(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ ty: Ty,
+ ) -> Type {
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
+ Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ }
+
+ pub fn reference(inner: &Type, m: Mutability) -> Type {
+ inner.derived(
+ TyKind::Ref(
+ if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
+ hir_ty::static_lifetime(),
+ inner.ty.clone(),
+ )
+ .intern(Interner),
+ )
+ }
+
+ fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
+ let resolver = lexical_env.resolver(db.upcast());
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into<TyDefId>) -> Type {
+ let ty = TyBuilder::def_ty(db, def.into()).fill_with_unknown().build();
+ Type::new(db, def, ty)
+ }
+
+ pub fn new_slice(ty: Type) -> Type {
+ Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
+ }
+
+ pub fn is_unit(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Tuple(0, ..))
+ }
+
+ pub fn is_bool(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Bool))
+ }
+
+ pub fn is_never(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Never)
+ }
+
+ pub fn is_mutable_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(hir_ty::Mutability::Mut, ..))
+ }
+
+ pub fn is_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(..))
+ }
+
+ pub fn as_reference(&self) -> Option<(Type, Mutability)> {
+ let (ty, _lt, m) = self.ty.as_reference()?;
+ let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
+ Some((self.derived(ty.clone()), m))
+ }
+
+ pub fn is_slice(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Slice(..))
+ }
+
+ pub fn is_usize(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
+ }
+
+ pub fn remove_ref(&self) -> Option<Type> {
+ match &self.ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
+ _ => None,
+ }
+ }
+
+ pub fn strip_references(&self) -> Type {
+ self.derived(self.ty.strip_references().clone())
+ }
+
+ pub fn strip_reference(&self) -> Type {
+ self.derived(self.ty.strip_reference().clone())
+ }
+
+ pub fn is_unknown(&self) -> bool {
+ self.ty.is_unknown()
+ }
+
+    /// Checks whether this type implements `std::future::Future`.
+ /// This function is used in `.await` syntax completion.
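+    ///
+    /// A minimal sketch of a caller (with hypothetical `db` and `receiver_ty` values, not part
+    /// of this API):
+    ///
+    /// ```ignore
+    /// if receiver_ty.impls_future(db) {
+    ///     // offer `.await` in the completion list
+    /// }
+    /// ```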
+ pub fn impls_future(&self, db: &dyn HirDatabase) -> bool {
+ let std_future_trait = db
+ .lang_item(self.env.krate, SmolStr::new_inline("future_trait"))
+ .and_then(|it| it.as_trait());
+ let std_future_trait = match std_future_trait {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait)
+ }
+
+    /// Checks whether this type implements `std::ops::FnOnce`.
+    ///
+    /// This can be used to check whether a type is callable: `FnOnce` is a supertrait of `Fn`
+    /// and `FnMut`, so every callable type implements at least `FnOnce`.
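+    ///
+    /// Illustrative use (hypothetical `db` and `ty` values):
+    ///
+    /// ```ignore
+    /// let is_callable = ty.impls_fnonce(db);
+    /// ```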
+ pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait_unique(
+ &canonical_ty,
+ db,
+ self.env.clone(),
+ fnonce_trait,
+ )
+ }
+
+ pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
+ let mut it = args.iter().map(|t| t.ty.clone());
+ let trait_ref = TyBuilder::trait_ref(db, trait_.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ let r = it.next().unwrap();
+ match x {
+ ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Const(ty) => {
+ // FIXME: this code is not covered in tests.
+ unknown_const_as_generic(ty.clone())
+ }
+ }
+ })
+ .build();
+
+ let goal = Canonical {
+ value: hir_ty::InEnvironment::new(&self.env.env, trait_ref.cast(Interner)),
+ binders: CanonicalVarKinds::empty(Interner),
+ };
+
+ db.trait_solve(self.env.krate, goal).is_some()
+ }
+
+ pub fn normalize_trait_assoc_type(
+ &self,
+ db: &dyn HirDatabase,
+ args: &[Type],
+ alias: TypeAlias,
+ ) -> Option<Type> {
+ let mut args = args.iter();
+ let projection = TyBuilder::assoc_type_projection(db, alias.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ // FIXME: this code is not covered in tests.
+ match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ let goal = hir_ty::make_canonical(
+ InEnvironment::new(
+ &self.env.env,
+ AliasEq {
+ alias: AliasTy::Projection(projection),
+ ty: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
+ .intern(Interner),
+ }
+ .cast(Interner),
+ ),
+ [TyVariableKind::General].into_iter(),
+ );
+
+ match db.trait_solve(self.env.krate, goal)? {
+ Solution::Unique(s) => s
+ .value
+ .subst
+ .as_slice(Interner)
+ .first()
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone())),
+ Solution::Ambig(_) => None,
+ }
+ }
+
+ pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
+ let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy"));
+ let copy_trait = match lang_item {
+ Some(LangItemTarget::TraitId(it)) => it,
+ _ => return false,
+ };
+ self.impls_trait(db, copy_trait.into(), &[])
+ }
+
+ pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
+ let callee = match self.ty.kind(Interner) {
+ TyKind::Closure(id, _) => Callee::Closure(*id),
+ TyKind::Function(_) => Callee::FnPtr,
+ _ => Callee::Def(self.ty.callable_def(db)?),
+ };
+
+ let sig = self.ty.callable_sig(db)?;
+ Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
+ }
+
+ pub fn is_closure(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Closure { .. })
+ }
+
+ pub fn is_fn(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
+ }
+
+ pub fn is_array(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Array(..))
+ }
+
+ pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+ let adt_id = match *self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
+ _ => return false,
+ };
+
+ let adt = adt_id.into();
+ match adt {
+ Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
+ _ => false,
+ }
+ }
+
+ pub fn is_raw_ptr(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Raw(..))
+ }
+
+ pub fn contains_unknown(&self) -> bool {
+ return go(&self.ty);
+
+ fn go(ty: &Ty) -> bool {
+ match ty.kind(Interner) {
+ TyKind::Error => true,
+
+ TyKind::Adt(_, substs)
+ | TyKind::AssociatedType(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::OpaqueType(_, substs)
+ | TyKind::FnDef(_, substs)
+ | TyKind::Closure(_, substs) => {
+ substs.iter(Interner).filter_map(|a| a.ty(Interner)).any(go)
+ }
+
+ TyKind::Array(_ty, len) if len.is_unknown() => true,
+ TyKind::Array(ty, _)
+ | TyKind::Slice(ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Ref(_, _, ty) => go(ty),
+
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Never
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Dyn(_)
+ | TyKind::Function(_)
+ | TyKind::Alias(_)
+ | TyKind::Foreign(_)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => false,
+ }
+ }
+ }
+
+ pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
+ let (variant_id, substs) = match self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
+ TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
+ _ => return Vec::new(),
+ };
+
+ db.field_types(variant_id)
+ .iter()
+ .map(|(local_id, ty)| {
+ let def = Field { parent: variant_id.into(), id: local_id };
+ let ty = ty.clone().substitute(Interner, substs);
+ (def, self.derived(ty))
+ })
+ .collect()
+ }
+
+ pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
+ if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
+ substs
+ .iter(Interner)
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone()))
+ .collect()
+ } else {
+ Vec::new()
+ }
+ }
+
+ pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
+ self.autoderef_(db).map(move |ty| self.derived(ty))
+ }
+
+ fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Ty> + 'a {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+ let environment = self.env.clone();
+ autoderef(db, environment, canonical).map(|canonical| canonical.value)
+ }
+
+ // This would be nicer if it just returned an iterator, but that runs into
+ // lifetime problems, because we need to borrow temp `CrateImplDefs`.
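+    /// A sketch of the early-exit callback style (hypothetical `db`, `krate` and `ty` values):
+    ///
+    /// ```ignore
+    /// let first_const = ty.iterate_assoc_items(db, krate, |item| match item {
+    ///     AssocItem::Const(konst) => Some(konst),
+    ///     _ => None,
+    /// });
+    /// ```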
+ pub fn iterate_assoc_items<T>(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let mut slot = None;
+ self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
+ slot = callback(assoc_item_id.into());
+ slot.is_some()
+ });
+ slot
+ }
+
+ fn iterate_assoc_items_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ callback: &mut dyn FnMut(AssocItemId) -> bool,
+ ) {
+ let def_crates = match method_resolution::def_crates(db, &self.ty, krate.id) {
+ Some(it) => it,
+ None => return,
+ };
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+
+ for impl_def in impls.for_self_ty(&self.ty) {
+ for &item in db.impl_data(*impl_def).items.iter() {
+ if callback(item) {
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
+ self.ty
+ .strip_references()
+ .as_adt()
+ .into_iter()
+ .flat_map(|(_, substs)| substs.iter(Interner))
+ .filter_map(|arg| arg.ty(Interner).cloned())
+ .map(move |ty| self.derived(ty))
+ }
+
+ pub fn iterate_method_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ // FIXME this can be retrieved from `scope`, except autoimport uses this
+ // to specify a different set, so the method needs to be split
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(Function) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_method_candidates");
+ let mut slot = None;
+
+ self.iterate_method_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let AssocItemId::FunctionId(func) = assoc_item_id {
+ if let Some(res) = callback(func.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_method_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_method_candidates_dyn(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ method_resolution::LookupMode::MethodCall,
+ &mut |_adj, id| callback(id),
+ );
+ }
+
+ pub fn iterate_path_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_path_candidates");
+ let mut slot = None;
+ self.iterate_path_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let Some(res) = callback(assoc_item_id.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_path_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_path_candidates(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ &mut |id| callback(id),
+ );
+ }
+
+ pub fn as_adt(&self) -> Option<Adt> {
+ let (adt, _subst) = self.ty.as_adt()?;
+ Some(adt.into())
+ }
+
+ pub fn as_builtin(&self) -> Option<BuiltinType> {
+ self.ty.as_builtin().map(|inner| BuiltinType { inner })
+ }
+
+ pub fn as_dyn_trait(&self) -> Option<Trait> {
+ self.ty.dyn_trait().map(Into::into)
+ }
+
+ /// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
+ /// or an empty iterator otherwise.
+ pub fn applicable_inherent_traits<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ ) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("applicable_inherent_traits");
+ self.autoderef_(db)
+ .filter_map(|ty| ty.dyn_trait())
+ .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
+ .map(Trait::from)
+ }
+
+ pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("env_traits");
+ self.autoderef_(db)
+ .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
+ .flat_map(|ty| {
+ self.env
+ .traits_in_scope_from_clauses(ty)
+ .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t))
+ })
+ .map(Trait::from)
+ }
+
+ pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
+ self.ty.impl_trait_bounds(db).map(|it| {
+ it.into_iter().filter_map(|pred| match pred.skip_binders() {
+ hir_ty::WhereClause::Implemented(trait_ref) => {
+ Some(Trait::from(trait_ref.hir_trait_id()))
+ }
+ _ => None,
+ })
+ })
+ }
+
+ pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
+ self.ty.associated_type_parent_trait(db).map(Into::into)
+ }
+
+ fn derived(&self, ty: Ty) -> Type {
+ Type { env: self.env.clone(), ty }
+ }
+
+ pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
+        // TypeWalk::walk for a Ty first visits its parameters and only after that the Ty itself.
+ // We need a different order here.
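+        //
+        // A sketch of a typical call (hypothetical `db` and `ty` values):
+        //
+        //     ty.walk(db, |t| {
+        //         // called for every type reachable from `ty`: ADTs, associated types,
+        //         // impl-trait and dyn-trait bounds, and so on
+        //     });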
+
+ fn walk_substs(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ substs: &Substitution,
+ cb: &mut impl FnMut(Type),
+ ) {
+ for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+
+ fn walk_bounds(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ bounds: &[QuantifiedWhereClause],
+ cb: &mut impl FnMut(Type),
+ ) {
+ for pred in bounds {
+ if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
+ cb(type_.clone());
+ // skip the self type. it's likely the type we just got the bounds from
+ for ty in
+ trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
+ {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+ }
+ }
+
+ fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
+ let ty = type_.ty.strip_references();
+ match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => {
+ cb(type_.derived(ty.clone()));
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::AssociatedType(_, substs) => {
+ if ty.associated_type_parent_trait(db).is_some() {
+ cb(type_.derived(ty.clone()));
+ }
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::OpaqueType(_, subst) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, subst, cb);
+ }
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, &opaque_ty.substitution, cb);
+ }
+ TyKind::Placeholder(_) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+ }
+ TyKind::Dyn(bounds) => {
+ walk_bounds(
+ db,
+ &type_.derived(ty.clone()),
+ bounds.bounds.skip_binders().interned(),
+ cb,
+ );
+ }
+
+ TyKind::Ref(_, _, ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Array(ty, _)
+ | TyKind::Slice(ty) => {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+
+ TyKind::FnDef(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::Closure(.., substs) => {
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
+ walk_substs(db, type_, &substitution.0, cb);
+ }
+
+ _ => {}
+ }
+ }
+
+ walk_type(db, self, &mut cb);
+ }
+
+ pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
+ hir_ty::could_unify(db, self.env.clone(), &tys)
+ }
+
+ pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
+ hir_ty::could_coerce(db, self.env.clone(), &tys)
+ }
+
+ pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ match self.ty.kind(Interner) {
+ TyKind::Placeholder(p) => Some(TypeParam {
+ id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
+ }),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct Callable {
+ ty: Type,
+ sig: CallableSig,
+ callee: Callee,
+ pub(crate) is_bound_method: bool,
+}
+
+#[derive(Debug)]
+enum Callee {
+ Def(CallableDefId),
+ Closure(ClosureId),
+ FnPtr,
+}
+
+pub enum CallableKind {
+ Function(Function),
+ TupleStruct(Struct),
+ TupleEnumVariant(Variant),
+ Closure,
+ FnPtr,
+}
+
+impl Callable {
+ pub fn kind(&self) -> CallableKind {
+ use Callee::*;
+ match self.callee {
+ Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
+ Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
+ Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
+ Closure(_) => CallableKind::Closure,
+ FnPtr => CallableKind::FnPtr,
+ }
+ }
+ pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
+ let func = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
+ _ => return None,
+ };
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ let param_list = src.value.param_list()?;
+ param_list.self_param()
+ }
+ pub fn n_params(&self) -> usize {
+ self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
+ }
+ pub fn params(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
+ let types = self
+ .sig
+ .params()
+ .iter()
+ .skip(if self.is_bound_method { 1 } else { 0 })
+ .map(|ty| self.ty.derived(ty.clone()));
+ let map_param = |it: ast::Param| it.pat().map(Either::Right);
+ let patterns = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(func)) => {
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ src.value.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ })
+ }
+ Callee::Closure(closure_id) => match closure_source(db, closure_id) {
+ Some(src) => src.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ }),
+ None => None,
+ },
+ _ => None,
+ };
+ patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
+ }
+ pub fn return_type(&self) -> Type {
+ self.ty.derived(self.sig.ret().clone())
+ }
+}
+
+fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
+ let (owner, expr_id) = db.lookup_intern_closure(closure.into());
+ let (_, source_map) = db.body_with_source_map(owner);
+ let ast = source_map.expr_syntax(expr_id).ok()?;
+ let root = ast.file_syntax(db.upcast());
+ let expr = ast.value.to_node(&root);
+ match expr {
+ ast::Expr::ClosureExpr(it) => Some(it),
+ _ => None,
+ }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum BindingMode {
+ Move,
+ Ref(Mutability),
+}
+
+/// For IDE only
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ScopeDef {
+ ModuleDef(ModuleDef),
+ GenericParam(GenericParam),
+ ImplSelfType(Impl),
+ AdtSelfType(Adt),
+ Local(Local),
+ Label(Label),
+ Unknown,
+}
+
+impl ScopeDef {
+ pub fn all_items(def: PerNs) -> ArrayVec<Self, 3> {
+ let mut items = ArrayVec::new();
+
+ match (def.take_types(), def.take_values()) {
+ (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())),
+ (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())),
+ (Some(m1), Some(m2)) => {
+ // Some items, like unit structs and enum variants, are
+ // returned as both a type and a value. Here we want
+ // to de-duplicate them.
+ if m1 != m2 {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ items.push(ScopeDef::ModuleDef(m2.into()));
+ } else {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ }
+ }
+ (None, None) => {}
+ };
+
+ if let Some(macro_def_id) = def.take_macros() {
+ items.push(ScopeDef::ModuleDef(ModuleDef::Macro(macro_def_id.into())));
+ }
+
+ if items.is_empty() {
+ items.push(ScopeDef::Unknown);
+ }
+
+ items
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.attrs(db),
+ ScopeDef::GenericParam(it) => Some(it.attrs(db)),
+ ScopeDef::ImplSelfType(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => None,
+ }
+ }
+
+ pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.module(db).map(|m| m.krate()),
+ ScopeDef::GenericParam(it) => Some(it.module(db).krate()),
+ ScopeDef::ImplSelfType(_) => None,
+ ScopeDef::AdtSelfType(it) => Some(it.module(db).krate()),
+ ScopeDef::Local(it) => Some(it.module(db).krate()),
+ ScopeDef::Label(it) => Some(it.module(db).krate()),
+ ScopeDef::Unknown => None,
+ }
+ }
+}
+
+impl From<ItemInNs> for ScopeDef {
+ fn from(item: ItemInNs) -> Self {
+ match item {
+ ItemInNs::Types(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Values(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Macros(id) => ScopeDef::ModuleDef(ModuleDef::Macro(id)),
+ }
+ }
+}
+
+pub trait HasVisibility {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
+ fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
+ let vis = self.visibility(db);
+ vis.is_visible_from(db.upcast(), module.id)
+ }
+}
+
+/// Trait for obtaining the defining crate of an item.
+pub trait HasCrate {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate;
+}
+
+impl<T: hir_def::HasModule> HasCrate for T {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db.upcast()).krate().into()
+ }
+}
+
+impl HasCrate for AssocItem {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Struct {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Union {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Field {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.parent_def(db).module(db).krate()
+ }
+}
+
+impl HasCrate for Variant {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Function {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Const {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for TypeAlias {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Type {
+ fn krate(&self, _db: &dyn HirDatabase) -> Crate {
+ self.env.krate.into()
+ }
+}
+
+impl HasCrate for Macro {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Trait {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Static {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Adt {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Module {
+ fn krate(&self, _: &dyn HirDatabase) -> Crate {
+ Module::krate(*self)
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
new file mode 100644
index 000000000..c84318b2f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -0,0 +1,1540 @@
+//! See `Semantics`.
+
+mod source_to_def;
+
+use std::{cell::RefCell, fmt, iter, ops};
+
+use base_db::{FileId, FileRange};
+use hir_def::{
+ body, macro_id_to_def_id,
+ resolver::{self, HasResolver, Resolver, TypeNs},
+ type_ref::Mutability,
+ AsMacroCall, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ db::AstDatabase,
+ name::{known, AsName},
+ ExpansionInfo, MacroCallId,
+};
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ algo::skip_trivia_token,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
+ match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+};
+
+use crate::{
+ db::HirDatabase,
+ semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
+ source_analyzer::{resolve_hir_path, SourceAnalyzer},
+ Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
+ HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+ Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+ /// An item
+ Def(ModuleDef),
+ /// A local binding (only value namespace)
+ Local(Local),
+ /// A type parameter
+ TypeParam(TypeParam),
+ /// A const parameter
+ ConstParam(ConstParam),
+ SelfType(Impl),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+ DeriveHelper(DeriveHelper),
+}
+
+impl PathResolution {
+ pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
+ match self {
+ PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
+ PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
+ Some(TypeNs::BuiltinType((*builtin).into()))
+ }
+ PathResolution::Def(
+ ModuleDef::Const(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::Function(_)
+ | ModuleDef::Module(_)
+ | ModuleDef::Static(_)
+ | ModuleDef::Trait(_),
+ ) => None,
+ PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
+ Some(TypeNs::TypeAliasId((*alias).into()))
+ }
+ PathResolution::BuiltinAttr(_)
+ | PathResolution::ToolModule(_)
+ | PathResolution::Local(_)
+ | PathResolution::DeriveHelper(_)
+ | PathResolution::ConstParam(_) => None,
+ PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
+ PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct TypeInfo {
+ /// The original type of the expression or pattern.
+ pub original: Type,
+ /// The adjusted type, if an adjustment happened.
+ pub adjusted: Option<Type>,
+}
+
+impl TypeInfo {
+ pub fn original(self) -> Type {
+ self.original
+ }
+
+ pub fn has_adjustment(&self) -> bool {
+ self.adjusted.is_some()
+ }
+
+ /// The adjusted type, or the original in case no adjustments occurred.
+ pub fn adjusted(self) -> Type {
+ self.adjusted.unwrap_or(self.original)
+ }
+}
+
+/// Primary API to get semantic information, like types, from syntax trees.
+pub struct Semantics<'db, DB> {
+ pub db: &'db DB,
+ imp: SemanticsImpl<'db>,
+}
+
+pub struct SemanticsImpl<'db> {
+ pub db: &'db dyn HirDatabase,
+ s2d_cache: RefCell<SourceToDefCache>,
+ expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+    // Root node to HirFileId cache
+ cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+ // MacroCall to its expansion's HirFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+}
+
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Semantics {{ ... }}")
+ }
+}
+
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+ pub fn new(db: &DB) -> Semantics<'_, DB> {
+ let impl_ = SemanticsImpl::new(db);
+ Semantics { db, imp: impl_ }
+ }
+
+ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ self.imp.parse(file_id)
+ }
+
+ pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ self.imp.parse_or_expand(file_id)
+ }
+
+ pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ self.imp.expand(macro_call)
+ }
+
+ /// If `item` has an attribute macro attached to it, expands it.
+ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ self.imp.expand_attr_macro(item)
+ }
+
+ pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ self.imp.expand_derive_as_pseudo_attr_macro(attr)
+ }
+
+ pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ self.imp.resolve_derive_macro(derive)
+ }
+
+ pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ self.imp.expand_derive_macro(derive)
+ }
+
+ pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ self.imp.is_attr_macro_call(item)
+ }
+
+ pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
+ self.imp.is_derive_annotated(item)
+ }
+
+ pub fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_attr_macro(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_derive_as_pseudo_attr_macro(
+ actual_macro_call,
+ speculative_args,
+ token_to_map,
+ )
+ }
+
+    /// Descend the token into macro calls to its first mapped counterpart.
+ pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_single(token)
+ }
+
+    /// Descend the token into macro calls to all its mapped counterparts.
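+    ///
+    /// A minimal sketch (hypothetical `sema` and `token` values):
+    ///
+    /// ```ignore
+    /// for tok in sema.descend_into_macros(token.clone()) {
+    ///     // each `tok` lives in a macro expansion the original token maps into
+    /// }
+    /// ```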
+ pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros(token)
+ }
+
+    /// Descend the token into macro calls to all its mapped counterparts that have the same text as the input token.
+    ///
+    /// Returns the original non-descended token if none of the mapped counterparts have the same text.
+ pub fn descend_into_macros_with_same_text(
+ &self,
+ token: SyntaxToken,
+ ) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros_with_same_text(token)
+ }
+
+ pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_with_kind_preference(token)
+ }
+
+ /// Maps a node down by mapping its first and last token down.
+ pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ self.imp.descend_node_into_attributes(node)
+ }
+
+ /// Search for a definition's source and cache its syntax tree
+ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ self.imp.source(def)
+ }
+
+ pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
+ self.imp.find_file(syntax_node).file_id
+ }
+
+    /// Attempts to map the node out of macro-expanded files, returning the original file range.
+ /// If upmapping is not possible, this will fall back to the range of the macro call of the
+ /// macro file the node resides in.
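+    ///
+    /// Illustrative use (hypothetical `sema` and `node` values):
+    ///
+    /// ```ignore
+    /// let FileRange { file_id, range } = sema.original_range(node.syntax());
+    /// ```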
+ pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ self.imp.original_range(node)
+ }
+
+    /// Attempts to map the node out of macro-expanded files, returning the original file range.
+ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ self.imp.original_range_opt(node)
+ }
+
+    /// Attempts to map the node out of macro-expanded files.
+    /// This only works for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.imp.original_ast_node(node)
+ }
+
+ pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
+ self.imp.diagnostics_display_range(diagnostics)
+ }
+
+ pub fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
+ }
+
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_with_macros(node)
+ }
+
+ pub fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_at_offset_with_macros(node, offset)
+ }
+
+    /// Find an AstNode by offset inside SyntaxNode; if it is inside a *macro file*,
+    /// search up until it is of the target AstNode type.
+ pub fn find_node_at_offset_with_macros<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+ }
+
+    /// Find an AstNode by offset inside SyntaxNode; if it is inside a *MacroCall*,
+    /// descend it and find again.
+ pub fn find_node_at_offset_with_descend<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+ }
+
+    /// Find an AstNode by offset inside SyntaxNode; if it is inside a *MacroCall*,
+    /// descend it and find again.
+ pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+ &'slf self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = N> + 'slf {
+ self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
+ }
+
+ pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ self.imp.resolve_lifetime_param(lifetime)
+ }
+
+ pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ self.imp.resolve_label(lifetime)
+ }
+
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ self.imp.resolve_type(ty)
+ }
+
+ pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+ self.imp.resolve_trait(trait_)
+ }
+
+ // FIXME: Figure out a nice interface to inspect adjustments
+ pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.imp.is_implicit_reborrow(expr)
+ }
+
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.imp.type_of_expr(expr)
+ }
+
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.imp.type_of_pat(pat)
+ }
+
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.imp.type_of_self(param)
+ }
+
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.imp.pattern_adjustments(pat)
+ }
+
+ pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.imp.binding_mode_of_pat(pat)
+ }
+
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+ self.imp.resolve_method_call(call).map(Function::from)
+ }
+
+ pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.imp.resolve_method_call_as_callable(call)
+ }
+
+ pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.imp.resolve_field(field)
+ }
+
+ pub fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.imp.resolve_record_field(field)
+ }
+
+ pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.imp.resolve_record_pat_field(field)
+ }
+
+ pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ self.imp.resolve_macro_call(macro_call)
+ }
+
+ pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ self.imp.is_unsafe_macro_call(macro_call)
+ }
+
+ pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ self.imp.resolve_attr_macro_call(item)
+ }
+
+ pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.imp.resolve_path(path)
+ }
+
+ pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ self.imp.resolve_extern_crate(extern_crate)
+ }
+
+ pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
+ self.imp.resolve_variant(record_lit).map(VariantDef::from)
+ }
+
+ pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.imp.resolve_bind_pat_to_const(pat)
+ }
+
+ pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.imp.record_literal_missing_fields(literal)
+ }
+
+ pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.imp.record_pattern_missing_fields(pattern)
+ }
+
+ pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+ let src = self.imp.find_file(src.syntax()).with_value(src).cloned();
+ T::to_def(&self.imp, src)
+ }
+
+ pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+ self.imp.to_module_def(file).next()
+ }
+
+ pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.imp.to_module_def(file)
+ }
+
+ pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.imp.scope(node)
+ }
+
+ pub fn scope_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SemanticsScope<'db>> {
+ self.imp.scope_at_offset(node, offset)
+ }
+
+ pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ self.imp.scope_for_def(def)
+ }
+
+ pub fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.imp.assert_contains_node(node)
+ }
+
+ pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ self.imp.is_unsafe_method_call(method_call_expr)
+ }
+
+ pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ self.imp.is_unsafe_ref_expr(ref_expr)
+ }
+
+ pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ self.imp.is_unsafe_ident_pat(ident_pat)
+ }
+}
+
+impl<'db> SemanticsImpl<'db> {
+ fn new(db: &'db dyn HirDatabase) -> Self {
+ SemanticsImpl {
+ db,
+ s2d_cache: Default::default(),
+ cache: Default::default(),
+ expansion_info_cache: Default::default(),
+ macro_call_cache: Default::default(),
+ }
+ }
+
+ fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ let tree = self.db.parse(file_id).tree();
+ self.cache(tree.syntax().clone(), file_id.into());
+ tree
+ }
+
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ }
+
+ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
+ let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+ let node = self.parse_or_expand(file_id)?;
+ Some(node)
+ }
+
+ fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(item.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+ self.parse_or_expand(macro_call_id.as_file())
+ }
+
+ fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(attr.clone());
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
+ })?;
+ self.parse_or_expand(call_id.as_file())
+ }
+
+ fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ let calls = self.derive_macro_calls(attr)?;
+ self.with_ctx(|ctx| {
+ Some(
+ calls
+ .into_iter()
+ .map(|call| {
+ macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
+ })
+ .collect(),
+ )
+ })
+ }
+
+ fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ let res: Vec<_> = self
+ .derive_macro_calls(attr)?
+ .into_iter()
+ .flat_map(|call| {
+ let file_id = call?.as_file();
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ })
+ .collect();
+ Some(res)
+ }
+
+ fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, &adt);
+ let src = InFile::new(file_id, attr.clone());
+ self.with_ctx(|ctx| {
+ let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
+ Some(res.to_vec())
+ })
+ }
+
+ fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, adt);
+ self.with_ctx(|ctx| ctx.has_derives(adt))
+ }
+
+ fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ let file_id = self.find_file(item.syntax()).file_id;
+ let src = InFile::new(file_id, item.clone());
+ self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
+ }
+
+ fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let SourceAnalyzer { file_id, resolver, .. } =
+ self.analyze_no_infer(actual_macro_call.syntax())?;
+ let macro_call = InFile::new(file_id, actual_macro_call);
+ let krate = resolver.krate();
+ let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+ resolver
+ .resolve_path_as_macro(self.db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(self.db.upcast(), it))
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_attr(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let macro_call = self.wrap_node_infile(actual_macro_call.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let attr = self.wrap_node_infile(actual_macro_call.clone());
+ let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
+ let macro_call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ // This might not be the correct way to do this, but it works for now
+ fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ let mut res = smallvec![];
+ let tokens = (|| {
+ let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
+ let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
+ Some((first, last))
+ })();
+ let (first, last) = match tokens {
+ Some(it) => it,
+ None => return res,
+ };
+
+ if first == last {
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ false
+ });
+ } else {
+ // Descend first and last token, then zip them to look for the node they belong to
+ let mut scratch: SmallVec<[_; 1]> = smallvec![];
+ self.descend_into_macros_impl(first, &mut |token| {
+ scratch.push(token);
+ false
+ });
+
+ let mut scratch = scratch.into_iter();
+ self.descend_into_macros_impl(
+ last,
+ &mut |InFile { value: last, file_id: last_fid }| {
+ if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+ if first_fid == last_fid {
+ if let Some(p) = first.parent() {
+ let range = first.text_range().cover(last.text_range());
+ let node = find_root(&p)
+ .covering_element(range)
+ .ancestors()
+ .take_while(|it| it.text_range() == range)
+ .find_map(N::cast);
+ if let Some(node) = node {
+ res.push(node);
+ }
+ }
+ }
+ }
+ false
+ },
+ );
+ }
+ res
+ }
+
+ fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res.push(value);
+ false
+ });
+ res
+ }
+
+ fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let text = token.text();
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if value.text() == text {
+ res.push(value);
+ }
+ false
+ });
+ if res.is_empty() {
+ res.push(token);
+ }
+ res
+ }
+
+ fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
+ node.parent().map_or(kind, |it| it.kind())
+ }
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let preferred_kind = fetch_kind(&token);
+ let mut res = None;
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if fetch_kind(&value) == preferred_kind {
+ res = Some(value);
+ true
+ } else {
+ if let None = res {
+ res = Some(value)
+ }
+ false
+ }
+ });
+ res.unwrap_or(token)
+ }
+
+ fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ let mut res = token.clone();
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res = value;
+ true
+ });
+ res
+ }
+
+ fn descend_into_macros_impl(
+ &self,
+ token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ ) {
+ let _p = profile::span("descend_into_macros");
+ let parent = match token.parent() {
+ Some(it) => it,
+ None => return,
+ };
+ let sa = match self.analyze_no_infer(&parent) {
+ Some(it) => it,
+ None => return,
+ };
+ let def_map = sa.resolver.def_map();
+
+ let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
+ let mut cache = self.expansion_info_cache.borrow_mut();
+ let mut mcache = self.macro_call_cache.borrow_mut();
+
+ let mut process_expansion_for_token =
+ |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+ .as_ref()?;
+
+ {
+ let InFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id);
+ }
+
+ let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let len = stack.len();
+
+ // requeue the tokens we got from mapping our current token down
+ stack.extend(mapped_tokens);
+ // if the length changed we have found a mapping for the token
+ (stack.len() != len).then(|| ())
+ };
+
+        // Remap the next token in the queue into the macro call it's in. If it is not remapped,
+        // either because it is not inside a macro call or because it is unused, push it into the
+        // result vec; otherwise push the remapped tokens back into the queue, as they can
+        // potentially be remapped again.
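+        //
+        // In outline, each queued token is tried against (a rough summary of the code below, not
+        // additional behaviour):
+        //   1. an attribute macro call on an ancestor item,
+        //   2. a function-like macro call surrounding its token tree,
+        //   3. a derive or derive-helper attribute on an ADT (via the pseudo derive expansion).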
+ while let Some(token) = stack.pop() {
+ self.db.unwind_if_cancelled();
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+                            // Don't force-populate the dyn cache for items that don't have an attribute anyway
+ return None;
+ }
+ Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(item),
+ token.as_ref(),
+ );
+ }
+
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
+
+ if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall = token.with_value(macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
+ }
+ };
+ process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+ } else if let Some(meta) = ast::Meta::cast(parent.clone()) {
+ // An attribute we failed to expand earlier; this might be a derive invocation
+ // or a derive helper attribute.
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(adt.into()),
+ token.as_ref(),
+ );
+ }
+ None => Some(adt),
+ }
+ } else {
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
+ return None;
+ }
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
+ let helpers =
+ def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
+ let item = Some(adt.into());
+ let mut res = None;
+ for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_file(),
+ item.clone(),
+ token.as_ref(),
+ ));
+ }
+ res
+ } else {
+ None
+ }
+ })()
+ .is_none();
+
+ if was_not_remapped && f(token) {
+ break;
+ }
+ }
+ }
+
+ // Note: this return type is deliberate, as [`find_nodes_at_offset_with_descend`] wants to stop
+ // traversing the inner iterator when it finds a node.
+ // The outer iterator is over the token's descendants.
+ // The inner iterator is the ancestors of a descendant.
+ fn descend_node_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
+ node.token_at_offset(offset)
+ .map(move |token| self.descend_into_macros(token))
+ .map(|descendants| {
+ descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
+ })
+ // Re-order the ancestor chains from `token_at_offset` so that the chain whose first
+ // (smallest) node comes first is yielded first.
+ // See `algo::ancestors_at_offset`, which uses the same approach.
+ .kmerge_by(|left, right| {
+ left.clone()
+ .map(|node| node.text_range().len())
+ .lt(right.clone().map(|node| node.text_range().len()))
+ })
+ }
+
+ fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ let node = self.find_file(node);
+ node.original_file_range(self.db.upcast())
+ }
+
+ fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ let node = self.find_file(node);
+ node.original_file_range_opt(self.db.upcast())
+ }
+
+ fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id);
+ value
+ },
+ )
+ }
+
+ fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ let root = self.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|it| it.to_node(&root));
+ node.as_ref().original_file_range(self.db.upcast())
+ }
+
+ fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
+ }
+
+ fn ancestors_with_macros(
+ &self,
+ node: SyntaxNode,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ let node = self.find_file(&node);
+ let db = self.db.upcast();
+ iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
+ match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ self.cache(value.clone(), file_id);
+ file_id.call_node(db)
+ }
+ }
+ })
+ .map(|it| it.value)
+ }
+
+ fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ node.token_at_offset(offset)
+ .map(|token| self.token_ancestors_with_macros(token))
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+ }
+
+ fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ let text = lifetime.text();
+ let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
+ let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
+ gpl.lifetime_params()
+ .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
+ })?;
+ let src = self.wrap_node_infile(lifetime_param);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ let text = lifetime.text();
+ let label = lifetime.syntax().ancestors().find_map(|syn| {
+ let label = match_ast! {
+ match syn {
+ ast::ForExpr(it) => it.label(),
+ ast::WhileExpr(it) => it.label(),
+ ast::LoopExpr(it) => it.label(),
+ ast::BlockExpr(it) => it.label(),
+ _ => None,
+ }
+ };
+ label.filter(|l| {
+ l.lifetime()
+ .and_then(|lt| lt.lifetime_ident_token())
+ .map_or(false, |lt| lt.text() == text)
+ })
+ })?;
+ let src = self.wrap_node_infile(label);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ let analyze = self.analyze(ty.syntax())?;
+ let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+ let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
+ .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
+ Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+ }
+
+ fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ let analyze = self.analyze(path.syntax())?;
+ let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+ let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+ match analyze
+ .resolver
+ .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
+ {
+ TypeNs::TraitId(id) => Some(Trait { id }),
+ _ => None,
+ }
+ }
+
+ fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
+ }
+
+ fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.analyze(expr.syntax())?
+ .type_of_expr(self.db, expr)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.analyze(pat.syntax())?
+ .type_of_pat(self.db, pat)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.analyze(param.syntax())?.type_of_self(self.db, param)
+ }
+
+ fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.analyze(pat.syntax())
+ .and_then(|it| it.pattern_adjustments(self.db, pat))
+ .unwrap_or_default()
+ }
+
+ fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
+ }
+
+ fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ self.analyze(call.syntax())?.resolve_method_call(self.db, call)
+ }
+
+ fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
+ }
+
+ fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_field(self.db, field)
+ }
+
+ fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.analyze(field.syntax())?.resolve_record_field(self.db, field)
+ }
+
+ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
+ }
+
+ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ let sa = self.analyze(macro_call.syntax())?;
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.resolve_macro_call(self.db, macro_call)
+ }
+
+ fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ let sa = match self.analyze(macro_call.syntax()) {
+ Some(it) => it,
+ None => return false,
+ };
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.is_unsafe_macro_call(self.db, macro_call)
+ }
+
+ fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ let item_in_file = self.wrap_node_infile(item.clone());
+ let id = self.with_ctx(|ctx| {
+ let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
+ macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+ })?;
+ Some(Macro { id })
+ }
+
+ fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.analyze(path.syntax())?.resolve_path(self.db, path)
+ }
+
+ fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ let krate = self.scope(extern_crate.syntax())?.krate();
+ let name = extern_crate.name_ref()?.as_name();
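+ // `extern crate self as name;` refers to the current crate itself.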
+ if name == known::SELF_PARAM {
+ return Some(krate);
+ }
+ krate
+ .dependencies(self.db)
+ .into_iter()
+ .find_map(|dep| (dep.name == name).then(|| dep.krate))
+ }
+
+ fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
+ self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
+ }
+
+ fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
+ }
+
+ fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.analyze(literal.syntax())
+ .and_then(|it| it.record_literal_missing_fields(self.db, literal))
+ .unwrap_or_default()
+ }
+
+ fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.analyze(pattern.syntax())
+ .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
+ .unwrap_or_default()
+ }
+
+ fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
+ let mut cache = self.s2d_cache.borrow_mut();
+ let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
+ f(&mut ctx)
+ }
+
+ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
+ }
+
+ fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ })
+ }
+
+ fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ self.analyze_with_offset_no_infer(node, offset).map(
+ |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ },
+ )
+ }
+
+ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ let file_id = self.db.lookup_intern_trait(def.id).id.file_id();
+ let resolver = def.id.resolver(self.db.upcast());
+ SemanticsScope { db: self.db, file_id, resolver }
+ }
+
+ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ let res = def.source(self.db)?;
+ self.cache(find_root(res.value.syntax()), res.file_id);
+ Some(res)
+ }
+
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, true)
+ }
+
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, false)
+ }
+
+ fn analyze_with_offset_no_infer(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, Some(offset), false)
+ }
+
+ fn analyze_impl(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ infer_body: bool,
+ ) -> Option<SourceAnalyzer> {
+ let _p = profile::span("Semantics::analyze_impl");
+ let node = self.find_file(node);
+
+ let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
+ Some(it) => it,
+ None => return None,
+ };
+
+ let resolver = match container {
+ ChildContainer::DefWithBodyId(def) => {
+ return Some(if infer_body {
+ SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ } else {
+ SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+ })
+ }
+ ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
+ };
+ Some(SourceAnalyzer::new_for_resolver(resolver, node))
+ }
+
+ fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+ assert!(root_node.parent().is_none());
+ let mut cache = self.cache.borrow_mut();
+ let prev = cache.insert(root_node, file_id);
+ assert!(prev == None || prev == Some(file_id))
+ }
+
+ fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.find_file(node);
+ }
+
+ fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+ let cache = self.cache.borrow();
+ cache.get(root_node).copied()
+ }
+
+ fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
+ let InFile { file_id, .. } = self.find_file(node.syntax());
+ InFile::new(file_id, node)
+ }
+
+ /// Wraps the node in a [`InFile`] with the file id it belongs to.
+ fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
+ let root_node = find_root(node);
+ let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+ panic!(
+ "\n\nFailed to lookup {:?} in this Semantics.\n\
+ Make sure to use only query nodes, derived from this instance of Semantics.\n\
+ root node: {:?}\n\
+ known nodes: {}\n\n",
+ node,
+ root_node,
+ self.cache
+ .borrow()
+ .keys()
+ .map(|it| format!("{:?}", it))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+ InFile::new(file_id, node)
+ }
+
+ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
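+ // Calling a method on a field of a `#[repr(packed)]` struct via autoref creates a
+ // reference to a possibly misaligned field, which is unsafe.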
+ method_call_expr
+ .receiver()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let ty = self.type_of_expr(&field_expr.expr()?)?.original;
+ if !ty.is_packed(self.db) {
+ return None;
+ }
+
+ let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let res = match func.self_param(self.db)?.access(self.db) {
+ Access::Shared | Access::Exclusive => true,
+ Access::Owned => false,
+ };
+ Some(res)
+ })
+ .unwrap_or(false)
+ }
+
+ fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ ref_expr
+ .expr()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let expr = field_expr.expr()?;
+ self.type_of_expr(&expr)
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+
+ // FIXME This needs layout computation to be correct. It will highlight
+ // more than it should with the current implementation.
+ }
+
+ fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ if ident_pat.ref_token().is_none() {
+ return false;
+ }
+
+ ident_pat
+ .syntax()
+ .parent()
+ .and_then(|parent| {
+ // An `IdentPat` can live under a `RecordPat` directly under a `RecordPatField` or
+ // a `RecordPatFieldList`. A `RecordPatField` also lives under a `RecordPatFieldList`,
+ // so this tries to look up the `IdentPat` anywhere along that structure up to the
+ // `RecordPat`, so we can get the containing type.
+ let record_pat = ast::RecordPatField::cast(parent.clone())
+ .and_then(|record_pat| record_pat.syntax().parent())
+ .or_else(|| Some(parent.clone()))
+ .and_then(|parent| {
+ ast::RecordPatFieldList::cast(parent)?
+ .syntax()
+ .parent()
+ .and_then(ast::RecordPat::cast)
+ });
+
+ // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if
+ // this is initialized from a `FieldExpr`.
+ if let Some(record_pat) = record_pat {
+ self.type_of_pat(&ast::Pat::RecordPat(record_pat))
+ } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
+ let field_expr = match let_stmt.initializer()? {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+
+ self.type_of_expr(&field_expr.expr()?)
+ } else {
+ None
+ }
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+ }
+}
+
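+/// Resolves a `MacroCallId` back to the `MacroId` of the macro definition it invokes.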
+fn macro_call_to_macro_id(
+ ctx: &mut SourceToDefCtx<'_, '_>,
+ db: &dyn AstDatabase,
+ macro_call_id: MacroCallId,
+) -> Option<MacroId> {
+ let loc = db.lookup_intern_macro_call(macro_call_id);
+ match loc.def.kind {
+ hir_expand::MacroDefKind::Declarative(it)
+ | hir_expand::MacroDefKind::BuiltIn(_, it)
+ | hir_expand::MacroDefKind::BuiltInAttr(_, it)
+ | hir_expand::MacroDefKind::BuiltInDerive(_, it)
+ | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
+ ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
+ ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ }
+}
+
+pub trait ToDef: AstNode + Clone {
+ type Def;
+
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+}
+
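+// Implements `ToDef` for each listed AST node type by delegating to the corresponding
+// `SourceToDefCtx` method.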
+macro_rules! to_def_impls {
+ ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
+ impl ToDef for $ast {
+ type Def = $def;
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+ sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
+ }
+ }
+ )*}
+}
+
+to_def_impls![
+ (crate::Module, ast::Module, module_to_def),
+ (crate::Module, ast::SourceFile, source_file_to_def),
+ (crate::Struct, ast::Struct, struct_to_def),
+ (crate::Enum, ast::Enum, enum_to_def),
+ (crate::Union, ast::Union, union_to_def),
+ (crate::Trait, ast::Trait, trait_to_def),
+ (crate::Impl, ast::Impl, impl_to_def),
+ (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
+ (crate::Const, ast::Const, const_to_def),
+ (crate::Static, ast::Static, static_to_def),
+ (crate::Function, ast::Fn, fn_to_def),
+ (crate::Field, ast::RecordField, record_field_to_def),
+ (crate::Field, ast::TupleField, tuple_field_to_def),
+ (crate::Variant, ast::Variant, enum_variant_to_def),
+ (crate::TypeParam, ast::TypeParam, type_param_to_def),
+ (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
+ (crate::ConstParam, ast::ConstParam, const_param_to_def),
+ (crate::GenericParam, ast::GenericParam, generic_param_to_def),
+ (crate::Macro, ast::Macro, macro_to_def),
+ (crate::Local, ast::IdentPat, bind_pat_to_def),
+ (crate::Local, ast::SelfParam, self_param_to_def),
+ (crate::Label, ast::Label, label_to_def),
+ (crate::Adt, ast::Adt, adt_to_def),
+];
+
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+ node.ancestors().last().unwrap()
+}
+
+/// `SemanticsScope` encapsulates the notion of a scope (the set of visible
+/// names) at a particular program point.
+///
+/// It is a bit tricky, as scopes do not really exist inside the compiler.
+/// Rather, the compiler directly computes for each reference the definition it
+/// refers to. It might transiently compute the explicit scope map while doing
+/// so, but, generally, this is not something left after the analysis.
+///
+/// However, we do very much need explicit scopes for IDE purposes --
+/// completion, at its core, lists the contents of the current scope. The notion
+/// of scope is also useful to answer questions like "what would be the meaning
+/// of this piece of code if we inserted it into this position?".
+///
+/// So `SemanticsScope` is constructed from a specific program point (a syntax
+/// node or just a raw offset) and provides access to the set of visible names
+/// on a somewhat best-effort basis.
+///
+/// Note that if you are wondering "what does this specific existing name mean?",
+/// you'd better use the `resolve_` family of methods.
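+///
+/// A rough usage sketch (the `sema`, `node`, and `completions` values are hypothetical
+/// caller state, not part of this module):
+///
+/// ```ignore
+/// if let Some(scope) = sema.scope(&node) {
+///     scope.process_all_names(&mut |name, def| {
+///         // e.g. collect completion candidates
+///         completions.push((name, def));
+///     });
+/// }
+/// ```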
+#[derive(Debug)]
+pub struct SemanticsScope<'a> {
+ pub db: &'a dyn HirDatabase,
+ file_id: HirFileId,
+ resolver: Resolver,
+}
+
+impl<'a> SemanticsScope<'a> {
+ pub fn module(&self) -> Module {
+ Module { id: self.resolver.module() }
+ }
+
+ pub fn krate(&self) -> Crate {
+ Crate { id: self.resolver.krate() }
+ }
+
+ pub(crate) fn resolver(&self) -> &Resolver {
+ &self.resolver
+ }
+
+ /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type` methods.
+ pub fn visible_traits(&self) -> VisibleTraits {
+ let resolver = &self.resolver;
+ VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
+ }
+
+ pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let scope = self.resolver.names_in_scope(self.db.upcast());
+ for (name, entries) in scope {
+ for entry in entries {
+ let def = match entry {
+ resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
+ resolver::ScopeDef::Unknown => ScopeDef::Unknown,
+ resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+ resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+ resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
+ resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+ None => continue,
+ },
+ resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Label(Label { parent, label_id }),
+ None => continue,
+ },
+ };
+ f(name.clone(), def)
+ }
+ }
+ }
+
+ /// Resolve a path as if it was written at the given scope. This is
+ /// necessarily a heuristic, as it doesn't take hygiene into account.
+ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
+ let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+ let path = Path::from_src(path.clone(), &ctx)?;
+ resolve_hir_path(self.db, &self.resolver, &path)
+ }
+
+ /// Iterates over associated types that may be specified after the given path (using
+ /// `Ty::Assoc` syntax).
+ pub fn assoc_type_shorthand_candidates<R>(
+ &self,
+ resolution: &PathResolution,
+ mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
+ ) -> Option<R> {
+ let def = self.resolver.generic_def()?;
+ hir_ty::associated_type_shorthand_candidates(
+ self.db,
+ def,
+ resolution.in_type_ns()?,
+ |name, _, id| cb(name, id.into()),
+ )
+ }
+}
+
+pub struct VisibleTraits(pub FxHashSet<TraitId>);
+
+impl ops::Deref for VisibleTraits {
+ type Target = FxHashSet<TraitId>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
new file mode 100644
index 000000000..ba9a1cfb6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -0,0 +1,473 @@
+//! Maps *syntax* of various definitions to their semantic ids.
+//!
+//! This is a very interesting module, and, in some sense, can be considered the
+//! heart of the IDE parts of rust-analyzer.
+//!
+//! This module solves the following problem:
+//!
+//! Given a piece of syntax, find the corresponding semantic definition (def).
+//!
+//! This problem is a part of more-or-less every IDE feature implemented. Every
+//! IDE functionality (like goto to definition), conceptually starts with a
+//! specific cursor position in a file. Starting with this text offset, we first
+//! figure out what syntactic construct we are at: is this a pattern, an
+//! expression, or an item definition?
+//!
+//! Knowing only the syntax gives us relatively little info. For example,
+//! looking at the syntax of the function we can realise that it is a part of an
+//! `impl` block, but we won't be able to tell what trait function the current
+//! function overrides, and whether it does that correctly. For that, we need to
+//! go from [`ast::Fn`] to [`crate::Function`], and that's exactly what this
+//! module does.
+//!
+//! As syntax trees are values and don't know their place of origin/identity,
+//! this module also requires [`InFile`] wrappers to understand which specific
+//! real or macro-expanded file the tree comes from.
+//!
+//! The actual algorithm to resolve syntax to def is curious in two aspects:
+//!
+//! * It is recursive
+//! * It uses the inverse algorithm (what is the syntax for this def?)
+//!
+//! Specifically, the algorithm goes like this:
+//!
+//! 1. Find the syntactic container for the syntax. For example, a field's
+//! container is the struct, and a struct's container is a module.
+//! 2. Recursively get the def corresponding to container.
+//! 3. Ask the container def for all child defs. These child defs contain
+//! the answer and answer's siblings.
+//! 4. For each child def, ask for its source.
+//! 5. The child def whose source is the syntax node we've started with
+//! is the answer.
+//!
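+//! As a rough sketch (with hypothetical helper names, not the actual code in this
+//! module), that lookup reads:
+//!
+//! ```ignore
+//! fn node_to_def(node: InFile<SyntaxNode>) -> Option<Def> {
+//!     let container = syntactic_container(&node)?;   // step 1
+//!     let container_def = node_to_def(container)?;   // step 2: recurse
+//!     container_def
+//!         .child_defs()                              // step 3
+//!         .find(|child| child.source() == node)      // steps 4 and 5
+//! }
+//! ```
+//!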
+//! It's interesting that both Roslyn and Kotlin contain code of a very
+//! similar shape.
+//!
+//! Let's take a look at Roslyn:
+//!
+//! <https://github.com/dotnet/roslyn/blob/36a0c338d6621cc5fe34b79d414074a95a6a489c/src/Compilers/CSharp/Portable/Compilation/SyntaxTreeSemanticModel.cs#L1403-L1429>
+//! <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1403>
+//!
+//! The `GetDeclaredType` takes `Syntax` as input, and returns `Symbol` as
+//! output. First, it retrieves a `Symbol` for parent `Syntax`:
+//!
+//! * <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1423>
+//!
+//! Then, it iterates parent symbol's children, looking for one which has the
+//! same text span as the original node:
+//!
+//! <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1786>
+//!
+//! Now, let's look at Kotlin:
+//!
+//! <https://github.com/JetBrains/kotlin/blob/a288b8b00e4754a1872b164999c6d3f3b8c8994a/idea/idea-frontend-fir/idea-fir-low-level-api/src/org/jetbrains/kotlin/idea/fir/low/level/api/FirModuleResolveStateImpl.kt#L93-L125>
+//!
+//! This function starts with a syntax node (`KtExpression` is syntax, like all
+//! `Kt` nodes), and returns a def. It uses
+//! `getNonLocalContainingOrThisDeclaration` to get syntactic container for a
+//! current node. Then, `findSourceNonLocalFirDeclaration` gets `Fir` for this
+//! parent. Finally, `findElementIn` function traverses `Fir` children to find
+//! one with the same source we originally started with.
+//!
+//! One question is left though -- where does the recursion stop? This happens
+//! when we get to the file syntax node, which doesn't have a syntactic parent.
+//! In that case, we loop through all the crates that might contain this file
+//! and look for a module whose source is the given file.
+//!
+//! Note that the logic in this module is somewhat fundamentally imprecise --
+//! due to conditional compilation and `#[path]` attributes, there's no
+//! injective mapping from syntax nodes to defs. This is not an edge case --
+//! more or less every item in a `lib.rs` is a part of two distinct crates: a
+//! library with `--cfg test` and a library without.
+//!
+//! At the moment, we don't really handle this well and return the first answer
+//! that works. Ideally, we should first let the caller pick a specific
+//! active crate for a given position, and then provide an API to resolve all
+//! syntax nodes against this specific crate.
+
+use base_db::FileId;
+use hir_def::{
+ attr::AttrId,
+ child_by_source::ChildBySource,
+ dyn_map::DynMap,
+ expr::{LabelId, PatId},
+ keys::{self, Key},
+ AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
+ GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::AsName, HirFileId, MacroCallId};
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::impl_from;
+use syntax::{
+ ast::{self, HasName},
+ AstNode, SyntaxNode,
+};
+
+use crate::{db::HirDatabase, InFile};
+
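+// Cache of child-by-source maps, keyed by (container, file) so that repeated lookups
+// within the same container stay cheap.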
+pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap>;
+
+pub(super) struct SourceToDefCtx<'a, 'b> {
+ pub(super) db: &'b dyn HirDatabase,
+ pub(super) cache: &'a mut SourceToDefCache,
+}
+
+impl SourceToDefCtx<'_, '_> {
+ pub(super) fn file_to_def(&mut self, file: FileId) -> SmallVec<[ModuleId; 1]> {
+ let _p = profile::span("SourceBinder::to_module_def");
+ let mut mods = SmallVec::new();
+ for &crate_id in self.db.relevant_crates(file).iter() {
+ // FIXME: inner items
+ let crate_def_map = self.db.crate_def_map(crate_id);
+ mods.extend(
+ crate_def_map
+ .modules_for_file(file)
+ .map(|local_id| crate_def_map.module_id(local_id)),
+ )
+ }
+ mods
+ }
+
+ pub(super) fn module_to_def(&mut self, src: InFile<ast::Module>) -> Option<ModuleId> {
+ let _p = profile::span("module_to_def");
+ let parent_declaration = src
+ .syntax()
+ .ancestors_with_macros_skip_attr_item(self.db.upcast())
+ .find_map(|it| it.map(ast::Module::cast).transpose());
+
+ let parent_module = match parent_declaration {
+ Some(parent_declaration) => self.module_to_def(parent_declaration),
+ None => {
+ let file_id = src.file_id.original_file(self.db.upcast());
+ self.file_to_def(file_id).get(0).copied()
+ }
+ }?;
+
+ let child_name = src.value.name()?.as_name();
+ let def_map = parent_module.def_map(self.db.upcast());
+ let &child_id = def_map[parent_module.local_id].children.get(&child_name)?;
+ Some(def_map.module_id(child_id))
+ }
+
+ pub(super) fn source_file_to_def(&mut self, src: InFile<ast::SourceFile>) -> Option<ModuleId> {
+ let _p = profile::span("source_file_to_def");
+ let file_id = src.file_id.original_file(self.db.upcast());
+ self.file_to_def(file_id).get(0).copied()
+ }
+
+ pub(super) fn trait_to_def(&mut self, src: InFile<ast::Trait>) -> Option<TraitId> {
+ self.to_def(src, keys::TRAIT)
+ }
+ pub(super) fn impl_to_def(&mut self, src: InFile<ast::Impl>) -> Option<ImplId> {
+ self.to_def(src, keys::IMPL)
+ }
+ pub(super) fn fn_to_def(&mut self, src: InFile<ast::Fn>) -> Option<FunctionId> {
+ self.to_def(src, keys::FUNCTION)
+ }
+ pub(super) fn struct_to_def(&mut self, src: InFile<ast::Struct>) -> Option<StructId> {
+ self.to_def(src, keys::STRUCT)
+ }
+ pub(super) fn enum_to_def(&mut self, src: InFile<ast::Enum>) -> Option<EnumId> {
+ self.to_def(src, keys::ENUM)
+ }
+ pub(super) fn union_to_def(&mut self, src: InFile<ast::Union>) -> Option<UnionId> {
+ self.to_def(src, keys::UNION)
+ }
+ pub(super) fn static_to_def(&mut self, src: InFile<ast::Static>) -> Option<StaticId> {
+ self.to_def(src, keys::STATIC)
+ }
+ pub(super) fn const_to_def(&mut self, src: InFile<ast::Const>) -> Option<ConstId> {
+ self.to_def(src, keys::CONST)
+ }
+ pub(super) fn type_alias_to_def(&mut self, src: InFile<ast::TypeAlias>) -> Option<TypeAliasId> {
+ self.to_def(src, keys::TYPE_ALIAS)
+ }
+ pub(super) fn record_field_to_def(&mut self, src: InFile<ast::RecordField>) -> Option<FieldId> {
+ self.to_def(src, keys::RECORD_FIELD)
+ }
+ pub(super) fn tuple_field_to_def(&mut self, src: InFile<ast::TupleField>) -> Option<FieldId> {
+ self.to_def(src, keys::TUPLE_FIELD)
+ }
+ pub(super) fn enum_variant_to_def(
+ &mut self,
+ src: InFile<ast::Variant>,
+ ) -> Option<EnumVariantId> {
+ self.to_def(src, keys::VARIANT)
+ }
+ pub(super) fn adt_to_def(
+ &mut self,
+ InFile { file_id, value }: InFile<ast::Adt>,
+ ) -> Option<AdtId> {
+ match value {
+ ast::Adt::Enum(it) => self.enum_to_def(InFile::new(file_id, it)).map(AdtId::EnumId),
+ ast::Adt::Struct(it) => {
+ self.struct_to_def(InFile::new(file_id, it)).map(AdtId::StructId)
+ }
+ ast::Adt::Union(it) => self.union_to_def(InFile::new(file_id, it)).map(AdtId::UnionId),
+ }
+ }
+ pub(super) fn bind_pat_to_def(
+ &mut self,
+ src: InFile<ast::IdentPat>,
+ ) -> Option<(DefWithBodyId, PatId)> {
+ let container = self.find_pat_or_label_container(src.syntax())?;
+ let (body, source_map) = self.db.body_with_source_map(container);
+ let src = src.map(ast::Pat::from);
+ let pat_id = source_map.node_pat(src.as_ref())?;
+ // The pattern could resolve to a constant; verify that this is not the case.
+ if let crate::Pat::Bind { .. } = body[pat_id] {
+ Some((container, pat_id))
+ } else {
+ None
+ }
+ }
+ pub(super) fn self_param_to_def(
+ &mut self,
+ src: InFile<ast::SelfParam>,
+ ) -> Option<(DefWithBodyId, PatId)> {
+ let container = self.find_pat_or_label_container(src.syntax())?;
+ let (_body, source_map) = self.db.body_with_source_map(container);
+ let pat_id = source_map.node_self_param(src.as_ref())?;
+ Some((container, pat_id))
+ }
+ pub(super) fn label_to_def(
+ &mut self,
+ src: InFile<ast::Label>,
+ ) -> Option<(DefWithBodyId, LabelId)> {
+ let container = self.find_pat_or_label_container(src.syntax())?;
+ let (_body, source_map) = self.db.body_with_source_map(container);
+ let label_id = source_map.node_label(src.as_ref())?;
+ Some((container, label_id))
+ }
+
+ pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> {
+ let map = self.dyn_map(src.as_ref())?;
+ map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
+ }
+
+ /// (AttrId, derive attribute call id, derive call ids)
+ pub(super) fn attr_to_derive_macro_call(
+ &mut self,
+ item: InFile<&ast::Adt>,
+ src: InFile<ast::Attr>,
+ ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
+ let map = self.dyn_map(item)?;
+ map[keys::DERIVE_MACRO_CALL]
+ .get(&src.value)
+ .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
+ }
+
+ pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
+ self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty())
+ }
+
+ fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
+ &mut self,
+ src: InFile<Ast>,
+ key: Key<Ast, ID>,
+ ) -> Option<ID> {
+ self.dyn_map(src.as_ref())?[key].get(&src.value).copied()
+ }
+
+ fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
+ let container = self.find_container(src.map(|it| it.syntax()))?;
+ Some(self.cache_for(container, src.file_id))
+ }
+
+ fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
+ let db = self.db;
+ self.cache
+ .entry((container, file_id))
+ .or_insert_with(|| container.child_by_source(db, file_id))
+ }
+
+ pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let dyn_map = self.cache_for(container, src.file_id);
+ dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x))
+ }
+
+ pub(super) fn lifetime_param_to_def(
+ &mut self,
+ src: InFile<ast::LifetimeParam>,
+ ) -> Option<LifetimeParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let dyn_map = self.cache_for(container, src.file_id);
+ dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied()
+ }
+
+ pub(super) fn const_param_to_def(
+ &mut self,
+ src: InFile<ast::ConstParam>,
+ ) -> Option<ConstParamId> {
+ let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
+ let dyn_map = self.cache_for(container, src.file_id);
+ dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x))
+ }
+
+ pub(super) fn generic_param_to_def(
+ &mut self,
+ InFile { file_id, value }: InFile<ast::GenericParam>,
+ ) -> Option<GenericParamId> {
+ match value {
+ ast::GenericParam::ConstParam(it) => {
+ self.const_param_to_def(InFile::new(file_id, it)).map(GenericParamId::ConstParamId)
+ }
+ ast::GenericParam::LifetimeParam(it) => self
+ .lifetime_param_to_def(InFile::new(file_id, it))
+ .map(GenericParamId::LifetimeParamId),
+ ast::GenericParam::TypeParam(it) => {
+ self.type_param_to_def(InFile::new(file_id, it)).map(GenericParamId::TypeParamId)
+ }
+ }
+ }
+
+ pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroId> {
+ self.dyn_map(src.as_ref()).and_then(|it| match &src.value {
+ ast::Macro::MacroRules(value) => {
+ it[keys::MACRO_RULES].get(value).copied().map(MacroId::from)
+ }
+ ast::Macro::MacroDef(value) => it[keys::MACRO2].get(value).copied().map(MacroId::from),
+ })
+ }
+
+ pub(super) fn proc_macro_to_def(&mut self, src: InFile<ast::Fn>) -> Option<MacroId> {
+ self.dyn_map(src.as_ref())
+ .and_then(|it| it[keys::PROC_MACRO].get(&src.value).copied().map(MacroId::from))
+ }
+
+ pub(super) fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
+ for container in src.ancestors_with_macros_skip_attr_item(self.db.upcast()) {
+ if let Some(res) = self.container_to_def(container) {
+ return Some(res);
+ }
+ }
+
+ let def = self.file_to_def(src.file_id.original_file(self.db.upcast())).get(0).copied()?;
+ Some(def.into())
+ }
+
+ fn container_to_def(&mut self, container: InFile<SyntaxNode>) -> Option<ChildContainer> {
+ let cont = if let Some(item) = ast::Item::cast(container.value.clone()) {
+ match item {
+ ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
+ ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
+ ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
+ ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
+ ast::Item::TypeAlias(it) => {
+ self.type_alias_to_def(container.with_value(it))?.into()
+ }
+ ast::Item::Struct(it) => {
+ let def = self.struct_to_def(container.with_value(it))?;
+ VariantId::from(def).into()
+ }
+ ast::Item::Union(it) => {
+ let def = self.union_to_def(container.with_value(it))?;
+ VariantId::from(def).into()
+ }
+ ast::Item::Fn(it) => {
+ let def = self.fn_to_def(container.with_value(it))?;
+ DefWithBodyId::from(def).into()
+ }
+ ast::Item::Static(it) => {
+ let def = self.static_to_def(container.with_value(it))?;
+ DefWithBodyId::from(def).into()
+ }
+ ast::Item::Const(it) => {
+ let def = self.const_to_def(container.with_value(it))?;
+ DefWithBodyId::from(def).into()
+ }
+ _ => return None,
+ }
+ } else {
+ let it = ast::Variant::cast(container.value)?;
+ let def = self.enum_variant_to_def(InFile::new(container.file_id, it))?;
+ VariantId::from(def).into()
+ };
+ Some(cont)
+ }
+
+ fn find_generic_param_container(&mut self, src: InFile<&SyntaxNode>) -> Option<GenericDefId> {
+ let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
+ for InFile { file_id, value } in ancestors {
+ let item = match ast::Item::cast(value) {
+ Some(it) => it,
+ None => continue,
+ };
+ let res: GenericDefId = match item {
+ ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Struct(it) => self.struct_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Enum(it) => self.enum_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Trait(it) => self.trait_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::TypeAlias(it) => {
+ self.type_alias_to_def(InFile::new(file_id, it))?.into()
+ }
+ ast::Item::Impl(it) => self.impl_to_def(InFile::new(file_id, it))?.into(),
+ _ => continue,
+ };
+ return Some(res);
+ }
+ None
+ }
+
+ fn find_pat_or_label_container(&mut self, src: InFile<&SyntaxNode>) -> Option<DefWithBodyId> {
+ let ancestors = src.ancestors_with_macros_skip_attr_item(self.db.upcast());
+ for InFile { file_id, value } in ancestors {
+ let item = match ast::Item::cast(value) {
+ Some(it) => it,
+ None => continue,
+ };
+ let res: DefWithBodyId = match item {
+ ast::Item::Const(it) => self.const_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Static(it) => self.static_to_def(InFile::new(file_id, it))?.into(),
+ ast::Item::Fn(it) => self.fn_to_def(InFile::new(file_id, it))?.into(),
+ _ => continue,
+ };
+ return Some(res);
+ }
+ None
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub(crate) enum ChildContainer {
+ DefWithBodyId(DefWithBodyId),
+ ModuleId(ModuleId),
+ TraitId(TraitId),
+ ImplId(ImplId),
+ EnumId(EnumId),
+ VariantId(VariantId),
+ TypeAliasId(TypeAliasId),
+ /// XXX: this might be the same def as, for example, an `EnumId`. However,
+ /// here the children are generic parameters, not, e.g., enum variants.
+ GenericDefId(GenericDefId),
+}
+impl_from! {
+ DefWithBodyId,
+ ModuleId,
+ TraitId,
+ ImplId,
+ EnumId,
+ VariantId,
+ TypeAliasId,
+ GenericDefId
+ for ChildContainer
+}
+
+impl ChildContainer {
+ fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
+ let db = db.upcast();
+ match self {
+ ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
+ ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
+ ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
+ ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
+ ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
+ ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
+ ChildContainer::TypeAliasId(_) => DynMap::default(),
+ ChildContainer::GenericDefId(it) => it.child_by_source(db, file_id),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
new file mode 100644
index 000000000..1eb51b20c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -0,0 +1,915 @@
+//! Lookup hir elements using positions in the source code. This is a lossy
+//! transformation: in general, a single source might correspond to several
+//! modules, functions, etc., due to macros, cfgs and `#[path=]` attributes on
+//! modules.
+//!
+//! So, this module should not be used during hir construction; it exists
+//! purely for "IDE needs".
+use std::{
+ iter::{self, once},
+ sync::Arc,
+};
+
+use hir_def::{
+ body::{
+ self,
+ scope::{ExprScopes, ScopeId},
+ Body, BodySourceMap,
+ },
+ expr::{ExprId, Pat, PatId},
+ macro_id_to_def_id,
+ path::{ModPath, Path, PathKind},
+ resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+ type_ref::Mutability,
+ AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId,
+ Lookup, ModuleDefId, VariantId,
+};
+use hir_expand::{
+ builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name::AsName, HirFileId, InFile,
+};
+use hir_ty::{
+ diagnostics::{
+ record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
+ UnsafeExpr,
+ },
+ method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution,
+ TyExt, TyKind, TyLoweringContext,
+};
+use itertools::Itertools;
+use smallvec::SmallVec;
+use syntax::{
+ ast::{self, AstNode},
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
+ BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
+ Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+};
+
+/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
+/// original source files. It should not be used inside the HIR itself.
+#[derive(Debug)]
+pub(crate) struct SourceAnalyzer {
+ pub(crate) file_id: HirFileId,
+ pub(crate) resolver: Resolver,
+ def: Option<(DefWithBodyId, Arc<Body>, Arc<BodySourceMap>)>,
+ infer: Option<Arc<InferenceResult>>,
+}
+
+impl SourceAnalyzer {
+ pub(crate) fn new_for_body(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer {
+ resolver,
+ def: Some((def, body, source_map)),
+ infer: Some(db.infer(def)),
+ file_id,
+ }
+ }
+
+ pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+ }
+
+ pub(crate) fn new_for_resolver(
+ resolver: Resolver,
+ node: InFile<&SyntaxNode>,
+ ) -> SourceAnalyzer {
+ SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id }
+ }
+
+ fn body_source_map(&self) -> Option<&BodySourceMap> {
+ self.def.as_ref().map(|(.., source_map)| &**source_map)
+ }
+ fn body(&self) -> Option<&Body> {
+ self.def.as_ref().map(|(_, body, _)| &**body)
+ }
+
+ fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<ExprId> {
+ let src = match expr {
+ ast::Expr::MacroExpr(expr) => {
+ self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))?
+ }
+ _ => InFile::new(self.file_id, expr.clone()),
+ };
+ let sm = self.body_source_map()?;
+ sm.node_expr(src.as_ref())
+ }
+
+ fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+ // FIXME: macros, see `expr_id`
+ let src = InFile { file_id: self.file_id, value: pat };
+ self.body_source_map()?.node_pat(src)
+ }
+
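+ /// Expands the given macro call and, if the expansion is itself a macro call, keeps
+ /// expanding until an expression is produced.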
+ fn expand_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: InFile<ast::MacroCall>,
+ ) -> Option<InFile<ast::Expr>> {
+ let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
+ let expanded = db.parse_or_expand(macro_file)?;
+
+ let res = match ast::MacroCall::cast(expanded.clone()) {
+ Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?,
+ _ => InFile::new(macro_file, ast::Expr::cast(expanded)?),
+ };
+ Some(res)
+ }
+
+ pub(crate) fn is_implicit_reborrow(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<Mutability> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let adjustments = infer.expr_adjustments.get(&expr_id)?;
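+ // An implicit reborrow shows up in the adjustments as a deref immediately followed
+ // by a (re)borrow.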
+ adjustments.windows(2).find_map(|slice| match slice {
+ &[Adjustment {kind: Adjust::Deref(None), ..}, Adjustment {kind: Adjust::Borrow(AutoBorrow::Ref(m)), ..}] => Some(match m {
+ hir_ty::Mutability::Mut => Mutability::Mut,
+ hir_ty::Mutability::Not => Mutability::Shared,
+ }),
+ _ => None,
+ })
+ }
+
+ pub(crate) fn type_of_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<(Type, Option<Type>)> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .expr_adjustments
+ .get(&expr_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
+ let ty = infer[expr_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_pat(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<(Type, Option<Type>)> {
+ let pat_id = self.pat_id(pat)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .pat_adjustments
+ .get(&pat_id)
+ .and_then(|adjusts| adjusts.last().cloned());
+ let ty = infer[pat_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_self(
+ &self,
+ db: &dyn HirDatabase,
+ param: &ast::SelfParam,
+ ) -> Option<Type> {
+ let src = InFile { file_id: self.file_id, value: param };
+ let pat_id = self.body_source_map()?.node_self_param(src)?;
+ let ty = self.infer.as_ref()?[pat_id].clone();
+ Some(Type::new_with_resolver(db, &self.resolver, ty))
+ }
+
+ pub(crate) fn binding_mode_of_pat(
+ &self,
+ _db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<BindingMode> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let infer = self.infer.as_ref()?;
+ infer.pat_binding_modes.get(&pat_id).map(|bm| match bm {
+ hir_ty::BindingMode::Move => BindingMode::Move,
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
+ BindingMode::Ref(Mutability::Shared)
+ }
+ })
+ }
+ pub(crate) fn pattern_adjustments(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<SmallVec<[Type; 1]>> {
+ let pat_id = self.pat_id(&pat)?;
+ let infer = self.infer.as_ref()?;
+ Some(
+ infer
+ .pat_adjustments
+ .get(&pat_id)?
+ .iter()
+ .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
+ .collect(),
+ )
+ }
+
+ pub(crate) fn resolve_method_call_as_callable(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let ty = db.value_ty(func.into()).substitute(Interner, &substs);
+ let ty = Type::new_with_resolver(db, &self.resolver, ty);
+ let mut res = ty.as_callable(db)?;
+ res.is_bound_method = true;
+ Some(res)
+ }
+
+ pub(crate) fn resolve_method_call(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<FunctionId> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
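+ // Prefer the concrete impl method if one can be resolved; fall back to the trait method.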
+ let f_in_impl = self.resolve_impl_method(db, f_in_trait, &substs);
+ f_in_impl.or(Some(f_in_trait))
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_record_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let expr = ast::Expr::from(record_expr);
+ let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?;
+
+ let local_name = field.field_name()?.as_name();
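+ // With shorthand syntax (`S { field }`) there is no explicit `name:`, so the field may
+ // be initialized from a local binding of the same name.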
+ let local = if field.name_ref().is_some() {
+ None
+ } else {
+ let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
+ match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::LocalBinding(pat_id)) => {
+ Some(Local { pat_id, parent: self.resolver.body_owner()? })
+ }
+ _ => None,
+ }
+ };
+ let (_, subst) = self.infer.as_ref()?.type_of_expr.get(expr_id)?.as_adt()?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_expr(expr_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
+ let field_ty =
+ db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
+ Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)))
+ }
+
+ pub(crate) fn resolve_record_pat_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordPatField,
+ ) -> Option<Field> {
+ let field_name = field.field_name()?.as_name();
+ let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let pat_id = self.pat_id(&record_pat.into())?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
+ Some(field.into())
+ }
+
+ pub(crate) fn resolve_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<Macro> {
+ let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_bind_pat_to_const(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<ModuleDef> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let body = self.body()?;
+ let path = match &body[pat_id] {
+ Pat::Path(path) => path,
+ _ => return None,
+ };
+ let res = resolve_hir_path(db, &self.resolver, path)?;
+ match res {
+ PathResolution::Def(def) => Some(def),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn HirDatabase,
+ path: &ast::Path,
+ ) -> Option<PathResolution> {
+ let parent = path.syntax().parent();
+ let parent = || parent.clone();
+
+ let mut prefer_value_ns = false;
+ let resolved = (|| {
+ if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
+ let expr_id = self.expr_id(db, &path_expr.into())?;
+ let infer = self.infer.as_ref()?;
+ if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
+ let assoc = match assoc {
+ AssocItemId::FunctionId(f_in_trait) => {
+ match infer.type_of_expr.get(expr_id) {
+ None => assoc,
+ Some(func_ty) => {
+ if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
+ self.resolve_impl_method(db, f_in_trait, subs)
+ .map(AssocItemId::FunctionId)
+ .unwrap_or(assoc)
+ } else {
+ assoc
+ }
+ }
+ }
+ }
+
+ _ => assoc,
+ };
+
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
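+ // Path expressions live in the value namespace, so prefer it when resolving below.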
+ prefer_value_ns = true;
+ } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
+ let pat_id = self.pat_id(&path_pat.into())?;
+ if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
+ let expr_id = self.expr_id(db, &rec_lit.into())?;
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else {
+ let record_pat = parent().and_then(ast::RecordPat::cast).map(ast::Pat::from);
+ let tuple_struct_pat =
+ || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
+ if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
+ let pat_id = self.pat_id(&pat)?;
+ let variant_res_for_pat =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id);
+ if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ }
+ }
+ None
+ })();
+ if resolved.is_some() {
+ return resolved;
+ }
+
+ // This must be a normal source file rather than a macro file.
+ let hygiene = Hygiene::new(db.upcast(), self.file_id);
+ let ctx = body::LowerCtx::with_hygiene(db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+
+ // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
+ // trying to resolve foo::bar.
+ if let Some(use_tree) = parent().and_then(ast::UseTree::cast) {
+ if use_tree.coloncolon_token().is_some() {
+ return resolve_hir_path_qualifier(db, &self.resolver, &hir_path);
+ }
+ }
+
+ let meta_path = path
+ .syntax()
+ .ancestors()
+ .take_while(|it| {
+ let kind = it.kind();
+ ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)
+ })
+ .last()
+ .and_then(ast::Meta::cast);
+
+ // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
+ // trying to resolve foo::bar.
+ if path.parent_path().is_some() {
+ return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
+ None if meta_path.is_some() => {
+ path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ })
+ }
+ res => res,
+ };
+ } else if let Some(meta_path) = meta_path {
+ // Case where we are resolving the final path segment of a path in an attribute.
+ // In this case we have to check for inert/builtin attributes and tools, and prioritize
+ // resolution of attributes over other namespaces.
+ if let Some(name_ref) = path.as_single_name_ref() {
+ let builtin =
+ BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
+ if builtin.is_some() {
+ return builtin.map(PathResolution::BuiltinAttr);
+ }
+
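+ // If the attribute is attached to a field or a variant, the name may also be a derive
+ // helper registered by a `#[derive]` on the containing ADT.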
+ if let Some(attr) = meta_path.parent_attr() {
+ let adt = if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ };
+ if let Some(adt) = adt {
+ let ast_id = db.ast_id_map(self.file_id).ast_id(&adt);
+ if let Some(helpers) = self
+ .resolver
+ .def_map()
+ .derive_helpers_in_scope(InFile::new(self.file_id, ast_id))
+ {
+ // FIXME: Multiple derives can have the same helper
+ let name_ref = name_ref.as_name();
+ for (macro_id, mut helpers) in
+ helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+ {
+ if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
+ {
+ return Some(PathResolution::DeriveHelper(DeriveHelper {
+ derive: *macro_id,
+ idx,
+ }));
+ }
+ }
+ }
+ }
+ }
+ }
+ return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
+ Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))),
+ // This labels any path that starts with a tool module as the tool itself; this is
+ // technically wrong, but there is no benefit in differentiating these two cases for
+ // the time being.
+ None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ }),
+ };
+ }
+ if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) {
+ resolve_hir_path_qualifier(db, &self.resolver, &hir_path)
+ } else {
+ resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns)
+ }
+ }
+
+ pub(crate) fn record_literal_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ literal: &ast::RecordExpr,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let expr_id = self.expr_id(db, &literal.clone().into())?;
+ let substs = infer.type_of_expr[expr_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ pub(crate) fn record_pattern_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ pattern: &ast::RecordPat,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let pat_id = self.pat_id(&pattern.clone().into())?;
+ let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ fn missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ substs: &Substitution,
+ variant: VariantId,
+ missing_fields: Vec<LocalFieldId>,
+ ) -> Vec<(Field, Type)> {
+ let field_types = db.field_types(variant);
+
+ missing_fields
+ .into_iter()
+ .map(|local_id| {
+ let field = FieldId { parent: variant, local_id };
+ let ty = field_types[local_id].clone().substitute(Interner, substs);
+ (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty))
+ })
+ .collect()
+ }
+
+ pub(crate) fn expand(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<HirFileId> {
+ let krate = self.resolver.krate();
+ let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
+ self.resolver
+ .resolve_path_as_macro(db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(db.upcast(), it))
+ })?;
+ Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ }
+
+ pub(crate) fn resolve_variant(
+ &self,
+ db: &dyn HirDatabase,
+ record_lit: ast::RecordExpr,
+ ) -> Option<VariantId> {
+ let infer = self.infer.as_ref()?;
+ let expr_id = self.expr_id(db, &record_lit.into())?;
+ infer.variant_resolution_for_expr(expr_id)
+ }
+
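+ // A macro call is considered unsafe if it resolves to the builtin `asm!`/`global_asm!`
+ // macros, or if its expansion contains an unsafe operation outside of an `unsafe` block.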
+ pub(crate) fn is_unsafe_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> bool {
+ // check for asm/global_asm
+ if let Some(mac) = self.resolve_macro_call(db, macro_call) {
+ let ex = match mac.id {
+ hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
+ hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
+ _ => hir_def::MacroExpander::Declarative,
+ };
+ match ex {
+ hir_def::MacroExpander::BuiltIn(e)
+ if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
+ {
+ return true
+ }
+ _ => (),
+ }
+ }
+ let macro_expr = match macro_call
+ .map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
+ .transpose()
+ {
+ Some(it) => it,
+ None => return false,
+ };
+
+ if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
+ if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) {
+ let mut is_unsafe = false;
+ unsafe_expressions(
+ db,
+ infer,
+ *def,
+ body,
+ expanded_expr,
+ &mut |UnsafeExpr { inside_unsafe_block, .. }| is_unsafe |= !inside_unsafe_block,
+ );
+ return is_unsafe;
+ }
+ }
+ false
+ }
+
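+ // Given a trait method and the substitutions of the resolved `FnDef` type, try to find
+ // the impl method that will actually be invoked for the call's `Self` type.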
+ fn resolve_impl_method(
+ &self,
+ db: &dyn HirDatabase,
+ func: FunctionId,
+ substs: &Substitution,
+ ) -> Option<FunctionId> {
+ let impled_trait = match func.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(trait_id) => trait_id,
+ _ => return None,
+ };
+ if substs.is_empty(Interner) {
+ return None;
+ }
+ let self_ty = substs.at(Interner, 0).ty(Interner)?;
+ let krate = self.resolver.krate();
+ let trait_env = self.resolver.body_owner()?.as_generic_def_id().map_or_else(
+ || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
+ |d| db.trait_environment(d),
+ );
+
+ let fun_data = db.function_data(func);
+ method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
+ }
+}
+
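+// Find the innermost expression scope containing `node`, by mapping its ancestor
+// expressions back into the body and taking the first one that has a scope.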
+fn scope_for(
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ node: InFile<&SyntaxNode>,
+) -> Option<ScopeId> {
+ node.value
+ .ancestors()
+ .filter_map(ast::Expr::cast)
+ .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it)))
+ .find_map(|it| scopes.scope_for(it))
+}
+
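+// Find the smallest expression scope whose range contains `offset`, mapping expressions that
+// originate from macro expansions back to the corresponding macro call in `from_file`.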
+fn scope_for_offset(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ if from_file == file_id {
+ return Some((value.text_range(), scope));
+ }
+
+ // FIXME handle attribute expansion
+ let source = iter::successors(file_id.call_node(db.upcast()), |it| {
+ it.file_id.call_node(db.upcast())
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.value.text_range(), scope))
+ })
+ .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
+ // find containing scope
+ .min_by_key(|(expr_range, _scope)| expr_range.len())
+ .map(|(expr_range, scope)| {
+ adjust(db, scopes, source_map, expr_range, from_file, offset).unwrap_or(*scope)
+ })
+}
+
+// XXX: during completion, the cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
+fn adjust(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ expr_range: TextRange,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ let child_scopes = scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let source = source_map.expr_syntax(*id).ok()?;
+ // FIXME: correctly handle macro expansion
+ if source.file_id != from_file {
+ return None;
+ }
+ let root = source.file_syntax(db.upcast());
+ let node = source.value.to_node(&root);
+ Some((node.syntax().text_range(), scope))
+ })
+ .filter(|&(range, _)| {
+ range.start() <= offset && expr_range.contains_range(range) && range != expr_range
+ });
+
+ child_scopes
+ .max_by(|&(r1, _), &(r2, _)| {
+ if r1.contains_range(r2) {
+ std::cmp::Ordering::Greater
+ } else if r2.contains_range(r1) {
+ std::cmp::Ordering::Less
+ } else {
+ r1.start().cmp(&r2.start())
+ }
+ })
+ .map(|(_ptr, scope)| *scope)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolve_hir_path_(db, resolver, path, false)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path_as_macro(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<Macro> {
+ resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(Into::into)
+}
+
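+// Resolve a HIR path, trying the type and value namespaces in the order dictated by
+// `prefer_value_ns`, then falling back to module items and macros.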
+fn resolve_hir_path_(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+ prefer_value_ns: bool,
+) -> Option<PathResolution> {
+ let types = || {
+ let (ty, unresolved) = match path.type_anchor() {
+ Some(type_ref) => {
+ let (_, res) = TyLoweringContext::new(db, resolver).lower_ty_ext(type_ref);
+ res.map(|ty_ns| (ty_ns, path.segments().first()))
+ }
+ None => {
+ let (ty, remaining) =
+ resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
+ match remaining {
+ Some(remaining) if remaining > 1 => {
+ if remaining + 1 == path.segments().len() {
+ Some((ty, path.segments().last()))
+ } else {
+ None
+ }
+ }
+ _ => Some((ty, path.segments().get(1))),
+ }
+ }
+ }?;
+
+ // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
+ // within the trait's associated types.
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
+ if let Some(type_alias_id) =
+ db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+ {
+ return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
+ }
+ }
+
+ let res = match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ };
+ match unresolved {
+ Some(unresolved) => resolver
+ .generic_def()
+ .and_then(|def| {
+ hir_ty::associated_type_shorthand_candidates(
+ db,
+ def,
+ res.in_type_ns()?,
+ |name, _, id| (name == unresolved.name).then(|| id),
+ )
+ })
+ .map(TypeAlias::from)
+ .map(Into::into)
+ .map(PathResolution::Def),
+ None => Some(res),
+ }
+ };
+
+ let body_owner = resolver.body_owner();
+ let values = || {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(pat_id) => {
+ let var = Local { parent: body_owner?, pat_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+ };
+
+ let items = || {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ };
+
+ let macros = || {
+ resolver
+ .resolve_path_as_macro(db.upcast(), path.mod_path())
+ .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
+ };
+
+ if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
+ .or_else(items)
+ .or_else(macros)
+}
+
+/// Resolves a path where we know it is a qualifier of another path.
+///
+/// For example, if we have:
+/// ```
+/// mod my {
+/// pub mod foo {
+/// struct Bar;
+/// }
+///
+/// pub fn foo() {}
+/// }
+/// ```
+/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
+fn resolve_hir_path_qualifier(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ .map(|ty| match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ })
+ .or_else(|| {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ })
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
new file mode 100644
index 000000000..616a406c7
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -0,0 +1,348 @@
+//! File symbol extraction.
+
+use base_db::FileRange;
+use hir_def::{
+ item_tree::ItemTreeNode, src::HasSource, AdtId, AssocItemId, AssocItemLoc, DefWithBodyId,
+ HasModule, ImplId, ItemContainerId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId,
+};
+use hir_expand::{HirFileId, InFile};
+use hir_ty::db::HirDatabase;
+use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+
+use crate::{Module, Semantics};
+
+/// The actual data that is stored in the index. It should be as compact as
+/// possible.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FileSymbol {
+ pub name: SmolStr,
+ pub loc: DeclarationLocation,
+ pub kind: FileSymbolKind,
+ pub container_name: Option<SmolStr>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DeclarationLocation {
+ /// The file id for both the `ptr` and `name_ptr`.
+ pub hir_file_id: HirFileId,
+ /// This points to the whole syntax node of the declaration.
+ pub ptr: SyntaxNodePtr,
+ /// This points to the [`syntax::ast::Name`] identifier of the declaration.
+ pub name_ptr: SyntaxNodePtr,
+}
+
+impl DeclarationLocation {
+ pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<'_, DB>) -> Option<SyntaxNode> {
+ let root = sema.parse_or_expand(self.hir_file_id)?;
+ Some(self.ptr.to_node(&root))
+ }
+
+ pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ let node = resolve_node(db, self.hir_file_id, &self.ptr)?;
+ Some(node.as_ref().original_file_range(db.upcast()))
+ }
+
+ pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ let node = resolve_node(db, self.hir_file_id, &self.name_ptr)?;
+ node.as_ref().original_file_range_opt(db.upcast())
+ }
+}
+
+fn resolve_node(
+ db: &dyn HirDatabase,
+ file_id: HirFileId,
+ ptr: &SyntaxNodePtr,
+) -> Option<InFile<SyntaxNode>> {
+ let root = db.parse_or_expand(file_id)?;
+ let node = ptr.to_node(&root);
+ Some(InFile::new(file_id, node))
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
+pub enum FileSymbolKind {
+ Const,
+ Enum,
+ Function,
+ Macro,
+ Module,
+ Static,
+ Struct,
+ Trait,
+ TypeAlias,
+ Union,
+}
+
+impl FileSymbolKind {
+ pub fn is_type(self) -> bool {
+ matches!(
+ self,
+ FileSymbolKind::Struct
+ | FileSymbolKind::Enum
+ | FileSymbolKind::Trait
+ | FileSymbolKind::TypeAlias
+ | FileSymbolKind::Union
+ )
+ }
+}
+
+/// Represents an outstanding module that the symbol collector must collect symbols from.
+struct SymbolCollectorWork {
+ module_id: ModuleId,
+ parent: Option<DefWithBodyId>,
+}
+
+pub struct SymbolCollector<'a> {
+ db: &'a dyn HirDatabase,
+ symbols: Vec<FileSymbol>,
+ work: Vec<SymbolCollectorWork>,
+ current_container_name: Option<SmolStr>,
+}
+
+/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
+/// all symbols that should be indexed for the given module.
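+///
+/// A minimal usage sketch (assuming a `db: &dyn HirDatabase` and a `module: Module` are
+/// already available):
+///
+/// ```ignore
+/// let symbols = SymbolCollector::collect(db, module);
+/// for symbol in &symbols {
+/// println!("{} ({:?})", symbol.name, symbol.kind);
+/// }
+/// ```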
+impl<'a> SymbolCollector<'a> {
+ pub fn collect(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
+ let mut symbol_collector = SymbolCollector {
+ db,
+ symbols: Default::default(),
+ current_container_name: None,
+ // The initial work is the root module we're collecting; additional work will
+ // be populated as we traverse the module's definitions.
+ work: vec![SymbolCollectorWork { module_id: module.into(), parent: None }],
+ };
+
+ while let Some(work) = symbol_collector.work.pop() {
+ symbol_collector.do_work(work);
+ }
+
+ symbol_collector.symbols
+ }
+
+ fn do_work(&mut self, work: SymbolCollectorWork) {
+ self.db.unwind_if_cancelled();
+
+ let parent_name = work.parent.and_then(|id| self.def_with_body_id_name(id));
+ self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
+ }
+
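+ // Index every named declaration in the module's item scope, visit impls, unnamed consts
+ // and module-local legacy macros, and queue modules nested inside bodies as further work.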
+ fn collect_from_module(&mut self, module_id: ModuleId) {
+ let def_map = module_id.def_map(self.db.upcast());
+ let scope = &def_map[module_id.local_id].scope;
+
+ for module_def_id in scope.declarations() {
+ match module_def_id {
+ ModuleDefId::ModuleId(id) => self.push_module(id),
+ ModuleDefId::FunctionId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Function);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::AdtId(AdtId::StructId(id)) => {
+ self.push_decl(id, FileSymbolKind::Struct)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, FileSymbolKind::Enum),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, FileSymbolKind::Union),
+ ModuleDefId::ConstId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Const);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::StaticId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Static);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::TraitId(id) => {
+ self.push_decl(id, FileSymbolKind::Trait);
+ self.collect_from_trait(id);
+ }
+ ModuleDefId::TypeAliasId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
+ }
+ ModuleDefId::MacroId(id) => match id {
+ MacroId::Macro2Id(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::MacroRulesId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::ProcMacroId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ },
+ // Don't index these.
+ ModuleDefId::BuiltinType(_) => {}
+ ModuleDefId::EnumVariantId(_) => {}
+ }
+ }
+
+ for impl_id in scope.impls() {
+ self.collect_from_impl(impl_id);
+ }
+
+ for const_id in scope.unnamed_consts() {
+ self.collect_from_body(const_id);
+ }
+
+ for (_, id) in scope.legacy_macros() {
+ for &id in id {
+ if id.module(self.db.upcast()) == module_id {
+ match id {
+ MacroId::Macro2Id(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::MacroRulesId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ MacroId::ProcMacroId(id) => self.push_decl(id, FileSymbolKind::Macro),
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_from_body(&mut self, body_id: impl Into<DefWithBodyId>) {
+ let body_id = body_id.into();
+ let body = self.db.body(body_id);
+
+ // Descend into the blocks and enqueue collection of all modules within.
+ for (_, def_map) in body.blocks(self.db.upcast()) {
+ for (id, _) in def_map.modules() {
+ self.work.push(SymbolCollectorWork {
+ module_id: def_map.module_id(id),
+ parent: Some(body_id),
+ });
+ }
+ }
+ }
+
+ fn collect_from_impl(&mut self, impl_id: ImplId) {
+ let impl_data = self.db.impl_data(impl_id);
+ for &assoc_item_id in &impl_data.items {
+ self.push_assoc_item(assoc_item_id)
+ }
+ }
+
+ fn collect_from_trait(&mut self, trait_id: TraitId) {
+ let trait_data = self.db.trait_data(trait_id);
+ self.with_container_name(trait_data.name.as_text(), |s| {
+ for &(_, assoc_item_id) in &trait_data.items {
+ s.push_assoc_item(assoc_item_id);
+ }
+ });
+ }
+
+ fn with_container_name(&mut self, container_name: Option<SmolStr>, f: impl FnOnce(&mut Self)) {
+ if let Some(container_name) = container_name {
+ let prev = self.current_container_name.replace(container_name);
+ f(self);
+ self.current_container_name = prev;
+ } else {
+ f(self);
+ }
+ }
+
+ fn current_container_name(&self) -> Option<SmolStr> {
+ self.current_container_name.clone()
+ }
+
+ fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
+ match body_id {
+ DefWithBodyId::FunctionId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::StaticId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::ConstId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ }
+ }
+
+ fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
+ match assoc_item_id {
+ AssocItemId::FunctionId(id) => self.push_decl_assoc(id, FileSymbolKind::Function),
+ AssocItemId::ConstId(id) => self.push_decl_assoc(id, FileSymbolKind::Const),
+ AssocItemId::TypeAliasId(id) => self.push_decl_assoc(id, FileSymbolKind::TypeAlias),
+ }
+ }
+
+ fn push_decl_assoc<L, T>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup<Data = AssocItemLoc<T>>,
+ T: ItemTreeNode,
+ <T as ItemTreeNode>::Source: HasName,
+ {
+ fn container_name(db: &dyn HirDatabase, container: ItemContainerId) -> Option<SmolStr> {
+ match container {
+ ItemContainerId::ModuleId(module_id) => {
+ let module = Module::from(module_id);
+ module.name(db).and_then(|name| name.as_text())
+ }
+ ItemContainerId::TraitId(trait_id) => {
+ let trait_data = db.trait_data(trait_id);
+ trait_data.name.as_text()
+ }
+ ItemContainerId::ImplId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+ }
+
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+ let container_name =
+ container_name(s.db, loc.container).or_else(|| s.current_container_name());
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name,
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_decl<L>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup,
+ <L as Lookup>::Data: HasSource,
+ <<L as Lookup>::Data as HasSource>::Value: HasName,
+ {
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_module(&mut self, module_id: ModuleId) {
+ self.push_file_symbol(|s| {
+ let def_map = module_id.def_map(s.db.upcast());
+ let module_data = &def_map[module_id.local_id];
+ let declaration = module_data.origin.declaration()?;
+ let module = declaration.to_node(s.db.upcast());
+ let name_node = module.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind: FileSymbolKind::Module,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: declaration.file_id,
+ ptr: SyntaxNodePtr::new(module.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_file_symbol(&mut self, f: impl FnOnce(&Self) -> Option<FileSymbol>) {
+ if let Some(file_symbol) = f(self) {
+ self.symbols.push(file_symbol);
+ }
+ }
+}