summaryrefslogtreecommitdiffstats
path: root/src/tools/rust-analyzer/crates/hir
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-06-19 09:26:03 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-06-19 09:26:03 +0000
commit9918693037dce8aa4bb6f08741b6812923486c18 (patch)
tree21d2b40bec7e6a7ea664acee056eb3d08e15a1cf /src/tools/rust-analyzer/crates/hir
parentReleasing progress-linux version 1.75.0+dfsg1-5~progress7.99u1. (diff)
downloadrustc-9918693037dce8aa4bb6f08741b6812923486c18.tar.xz
rustc-9918693037dce8aa4bb6f08741b6812923486c18.zip
Merging upstream version 1.76.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir')
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs60
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs61
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs455
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs612
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs175
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs63
10 files changed, 986 insertions, 488 deletions
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
index f860ee948..4c1dfbc29 100644
--- a/src/tools/rust-analyzer/crates/hir/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -13,9 +13,9 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
-either = "1.7.0"
+either.workspace = true
arrayvec = "0.7.2"
-itertools = "0.10.5"
+itertools.workspace = true
smallvec.workspace = true
triomphe.workspace = true
once_cell = "1.17.1"
@@ -30,3 +30,6 @@ profile.workspace = true
stdx.workspace = true
syntax.workspace = true
tt.workspace = true
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 796490abd..185853353 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -1,5 +1,6 @@
//! Attributes & documentation for hir types.
+use base_db::FileId;
use hir_def::{
attr::AttrsWithOwner,
item_scope::ItemInNs,
@@ -8,7 +9,10 @@ use hir_def::{
resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, ModuleDefId,
};
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::{
+ name::Name,
+ span::{RealSpanMap, SpanMapRef},
+};
use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
@@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
if ast_path.syntax().text() != link {
return None;
}
- ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
+ ModPath::from_src(
+ db.upcast(),
+ ast_path,
+ SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)),
+ )
};
let full = try_get_modpath(link);
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index 936581bfe..d98e3decd 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -3,10 +3,27 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
-pub use hir_def::db::*;
+pub use hir_def::db::{
+ AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
+ ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
+ CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
+ EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
+ FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
+ FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
+ ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
+ InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
+ InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
+ InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
+ InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
+ InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
+ Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
+ StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
+ TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
+ UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
+};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
- ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
- MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+ ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
+ ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 479138b67..1cb36f9b0 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -3,7 +3,7 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
-pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase};
+pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
@@ -12,7 +12,7 @@ use hir_def::path::ModPath;
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
-use crate::{AssocItem, Field, Local, MacroKind, Type};
+use crate::{AssocItem, Field, Local, MacroKind, Trait, Type};
macro_rules! diagnostics {
($($diag:ident,)*) => {
@@ -53,6 +53,10 @@ diagnostics![
PrivateAssocItem,
PrivateField,
ReplaceFilterMapNextWithFindMap,
+ TraitImplIncorrectSafety,
+ TraitImplMissingAssocItems,
+ TraitImplRedundantAssocItems,
+ TraitImplOrphan,
TypedHole,
TypeMismatch,
UndeclaredLabel,
@@ -66,6 +70,7 @@ diagnostics![
UnresolvedModule,
UnresolvedProcMacro,
UnusedMut,
+ UnusedVariable,
];
#[derive(Debug)]
@@ -173,20 +178,19 @@ pub struct MalformedDerive {
#[derive(Debug)]
pub struct NoSuchField {
- pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
+ pub field: InFile<AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>>,
pub private: bool,
}
#[derive(Debug)]
pub struct PrivateAssocItem {
- pub expr_or_pat:
- InFile<Either<AstPtr<ast::Expr>, Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
pub item: AssocItem,
}
#[derive(Debug)]
pub struct MismatchedTupleStructPatArgCount {
- pub expr_or_pat: InFile<Either<AstPtr<ast::Expr>, AstPtr<ast::Pat>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: usize,
pub found: usize,
}
@@ -227,7 +231,7 @@ pub struct MissingUnsafe {
#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
- pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
+ pub field_list_parent: AstPtr<Either<ast::RecordExpr, ast::RecordPat>>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
pub missed_fields: Vec<Name>,
}
@@ -254,7 +258,7 @@ pub struct MissingMatchArms {
#[derive(Debug)]
pub struct TypeMismatch {
- pub expr_or_pat: Either<InFile<AstPtr<ast::Expr>>, InFile<AstPtr<ast::Pat>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: Type,
pub actual: Type,
}
@@ -271,7 +275,47 @@ pub struct UnusedMut {
}
#[derive(Debug)]
+pub struct UnusedVariable {
+ pub local: Local,
+}
+
+#[derive(Debug)]
pub struct MovedOutOfRef {
pub ty: Type,
pub span: InFile<SyntaxNodePtr>,
}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct IncoherentImpl {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplOrphan {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+}
+
+// FIXME: Split this off into the corresponding 4 rustc errors
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplIncorrectSafety {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+ pub should_be_safe: bool,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplMissingAssocItems {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+ pub missing: Vec<(Name, AssocItem)>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplRedundantAssocItems {
+ pub file_id: HirFileId,
+ pub trait_: Trait,
+ pub impl_: AstPtr<ast::Impl>,
+ pub assoc_item: (Name, AssocItem),
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index ac171026d..5847c8a9f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -1,6 +1,6 @@
//! HirDisplay implementations for various hir types.
use hir_def::{
- data::adt::VariantData,
+ data::adt::{StructKind, VariantData},
generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
@@ -163,7 +163,40 @@ impl HirDisplay for Struct {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
write_generic_params(def_id, f)?;
+
+ let variant_data = self.variant_data(f.db);
+ if let StructKind::Tuple = variant_data.kind() {
+ f.write_char('(')?;
+ let mut it = variant_data.fields().iter().peekable();
+
+ while let Some((id, _)) = it.next() {
+ let field = Field { parent: (*self).into(), id };
+ field.ty(f.db).hir_fmt(f)?;
+ if it.peek().is_some() {
+ f.write_str(", ")?;
+ }
+ }
+
+ f.write_str(");")?;
+ }
+
write_where_clause(def_id, f)?;
+
+ if let StructKind::Record = variant_data.kind() {
+ let fields = self.fields(f.db);
+ if fields.is_empty() {
+ f.write_str(" {}")?;
+ } else {
+ f.write_str(" {\n")?;
+ for field in self.fields(f.db) {
+ f.write_str(" ")?;
+ field.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+ }
+
Ok(())
}
}
@@ -176,6 +209,18 @@ impl HirDisplay for Enum {
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
+
+ let variants = self.variants(f.db);
+ if !variants.is_empty() {
+ f.write_str(" {\n")?;
+ for variant in variants {
+ f.write_str(" ")?;
+ variant.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+
Ok(())
}
}
@@ -188,6 +233,18 @@ impl HirDisplay for Union {
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
+
+ let fields = self.fields(f.db);
+ if !fields.is_empty() {
+ f.write_str(" {\n")?;
+ for field in self.fields(f.db) {
+ f.write_str(" ")?;
+ field.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+
Ok(())
}
}
@@ -559,7 +616,7 @@ impl HirDisplay for TypeAlias {
write_where_clause(def_id, f)?;
if !data.bounds.is_empty() {
f.write_str(": ")?;
- f.write_joined(&data.bounds, " + ")?;
+ f.write_joined(data.bounds.iter(), " + ")?;
}
if let Some(ty) = &data.type_ref {
f.write_str(" = ")?;
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index b215ed38f..e0230fa37 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -17,7 +17,8 @@
//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "512"]
mod semantics;
@@ -33,7 +34,7 @@ pub mod symbols;
mod display;
-use std::{iter, ops::ControlFlow};
+use std::{iter, mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
@@ -53,20 +54,20 @@ use hir_def::{
resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
- EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId,
- InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup,
- MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, TypeParamId, UnionId,
+ EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
+ ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId,
+ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
-use hir_expand::{name::name, MacroCallKind};
+use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind};
use hir_ty::{
- all_super_traits, autoderef,
+ all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
- layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
+ layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
- mir::{self, interpret_mir},
+ mir::interpret_mir,
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
@@ -80,7 +81,7 @@ use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
- ast::{self, HasAttrs as _, HasDocComments, HasName},
+ ast::{self, HasAttrs as _, HasName},
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
};
use triomphe::Arc;
@@ -89,19 +90,11 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{resolve_doc_path_on, HasAttrs},
- diagnostics::{
- AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
- IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
- MacroExpansionParseError, MalformedDerive, MismatchedArgCount,
- MismatchedTupleStructPatArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
- MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
- ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
- UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
- UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
- UnresolvedProcMacro, UnusedMut,
- },
+ diagnostics::*,
has_source::HasSource,
- semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+ semantics::{
+ DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits,
+ },
};
// Be careful with these re-exports.
@@ -132,15 +125,18 @@ pub use {
},
hir_expand::{
attrs::{Attr, AttrId},
+ hygiene::{marks_rev, SyntaxContextExt},
name::{known, Name},
- ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
+ MacroFileIdExt,
},
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
- mir::MirEvalError,
PointerCast, Safety,
},
+ // FIXME: Properly encapsulate mir
+ hir_ty::{mir, Interner as ChalkTyInterner},
};
// These are negative re-exports: pub using these names is forbidden, they
@@ -148,7 +144,10 @@ pub use {
#[allow(unused)]
use {
hir_def::path::Path,
- hir_expand::{hygiene::Hygiene, name::AsName},
+ hir_expand::{
+ name::AsName,
+ span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
+ },
};
/// hir::Crate describes a single crate. It's the main interface with which
@@ -452,15 +451,7 @@ impl HasVisibility for ModuleDef {
impl Module {
/// Name of this module.
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
- let def_map = self.id.def_map(db.upcast());
- let parent = def_map[self.id.local_id].parent?;
- def_map[parent].children.iter().find_map(|(name, module_id)| {
- if *module_id == self.id.local_id {
- Some(name.clone())
- } else {
- None
- }
- })
+ self.id.name(db.upcast())
}
/// Returns the crate this module is part of.
@@ -571,6 +562,7 @@ impl Module {
if def_map[m.id.local_id].origin.is_inline() {
m.diagnostics(db, acc)
}
+ acc.extend(def.diagnostics(db))
}
ModuleDef::Trait(t) => {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
@@ -610,29 +602,141 @@ impl Module {
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
+ let mut impl_assoc_items_scratch = vec![];
for impl_def in self.impl_defs(db) {
let loc = impl_def.id.lookup(db.upcast());
let tree = loc.id.item_tree(db.upcast());
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
- if file_id.is_builtin_derive(db.upcast()) {
+ if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) {
// these expansion come from us, diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue;
}
+ let ast_id_map = db.ast_id_map(file_id);
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
- let ast_id_map = db.ast_id_map(file_id);
-
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
}
- for item in impl_def.items(db) {
- let def: DefWithBody = match item {
+ if !impl_def.check_orphan_rules(db) {
+ acc.push(TraitImplOrphan { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
+ }
+
+ let trait_ = impl_def.trait_(db);
+ let trait_is_unsafe = trait_.map_or(false, |t| t.is_unsafe(db));
+ let impl_is_negative = impl_def.is_negative(db);
+ let impl_is_unsafe = impl_def.is_unsafe(db);
+
+ let drop_maybe_dangle = (|| {
+ // FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper
+ let trait_ = trait_?;
+ let drop_trait = db.lang_item(self.krate().into(), LangItem::Drop)?.as_trait()?;
+ if drop_trait != trait_.into() {
+ return None;
+ }
+ let parent = impl_def.id.into();
+ let generic_params = db.generic_params(parent);
+ let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| {
+ GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
+ });
+ let type_params = generic_params
+ .iter()
+ .filter(|(_, it)| it.type_param().is_some())
+ .map(|(local_id, _)| {
+ GenericParamId::TypeParamId(TypeParamId::from_unchecked(
+ TypeOrConstParamId { parent, local_id },
+ ))
+ });
+ let res = type_params
+ .chain(lifetime_params)
+ .any(|p| db.attrs(AttrDefId::GenericParamId(p)).by_key("may_dangle").exists());
+ Some(res)
+ })()
+ .unwrap_or(false);
+
+ match (impl_is_unsafe, trait_is_unsafe, impl_is_negative, drop_maybe_dangle) {
+ // unsafe negative impl
+ (true, _, true, _) |
+ // unsafe impl for safe trait
+ (true, false, _, false) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: true }.into()),
+ // safe impl for unsafe trait
+ (false, true, false, _) |
+ // safe impl of dangling drop
+ (false, false, _, true) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: false }.into()),
+ _ => (),
+ };
+
+ // Negative impls can't have items, don't emit missing items diagnostic for them
+ if let (false, Some(trait_)) = (impl_is_negative, trait_) {
+ let items = &db.trait_data(trait_.into()).items;
+ let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
+ AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
+ AssocItemId::ConstId(id) => Const::from(id).value(db).is_none(),
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
+ });
+ impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
+ |&item| {
+ Some((
+ item,
+ match item {
+ AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
+ AssocItemId::ConstId(it) => {
+ db.const_data(it).name.as_ref()?.clone()
+ }
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
+ },
+ ))
+ },
+ ));
+
+ let redundant = impl_assoc_items_scratch
+ .iter()
+ .filter(|(id, name)| {
+ !items.iter().any(|(impl_name, impl_item)| {
+ discriminant(impl_item) == discriminant(id) && impl_name == name
+ })
+ })
+ .map(|(item, name)| (name.clone(), AssocItem::from(*item)));
+ for (name, assoc_item) in redundant {
+ acc.push(
+ TraitImplRedundantAssocItems {
+ trait_,
+ file_id,
+ impl_: ast_id_map.get(node.ast_id()),
+ assoc_item: (name, assoc_item),
+ }
+ .into(),
+ )
+ }
+
+ let missing: Vec<_> = required_items
+ .filter(|(name, id)| {
+ !impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
+ discriminant(impl_item) == discriminant(id) && impl_name == name
+ })
+ })
+ .map(|(name, item)| (name.clone(), AssocItem::from(*item)))
+ .collect();
+ if !missing.is_empty() {
+ acc.push(
+ TraitImplMissingAssocItems {
+ impl_: ast_id_map.get(node.ast_id()),
+ file_id,
+ missing,
+ }
+ .into(),
+ )
+ }
+ impl_assoc_items_scratch.clear();
+ }
+
+ for &item in &db.impl_data(impl_def.id).items {
+ let def: DefWithBody = match AssocItem::from(item) {
AssocItem::Function(it) => it.into(),
AssocItem::Const(it) => it.into(),
AssocItem::TypeAlias(_) => continue,
@@ -671,8 +775,15 @@ impl Module {
db: &dyn DefDatabase,
item: impl Into<ItemInNs>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
- hir_def::find_path::find_path(db, item.into().into(), self.into(), prefer_no_std)
+ hir_def::find_path::find_path(
+ db,
+ item.into().into(),
+ self.into(),
+ prefer_no_std,
+ prefer_prelude,
+ )
}
/// Finds a path that can be used to refer to the given item from within
@@ -683,6 +794,7 @@ impl Module {
item: impl Into<ItemInNs>,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
hir_def::find_path::find_path_prefixed(
db,
@@ -690,6 +802,7 @@ impl Module {
self.into(),
prefix_kind,
prefer_no_std,
+ prefer_prelude,
)
}
}
@@ -862,10 +975,9 @@ fn precise_macro_call_location(
// Compute the precise location of the macro name's token in the derive
// list.
let token = (|| {
- let derive_attr = node
- .doc_comments_and_attrs()
+ let derive_attr = collect_attrs(&node)
.nth(derive_attr_index.ast_index())
- .and_then(Either::left)?;
+ .and_then(|x| Either::left(x.1))?;
let token_tree = derive_attr.meta()?.token_tree()?;
let group_by = token_tree
.syntax()
@@ -890,10 +1002,9 @@ fn precise_macro_call_location(
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_node(db.upcast());
- let attr = node
- .doc_comments_and_attrs()
+ let attr = collect_attrs(&node)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.unwrap_or_else(|| {
panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
});
@@ -1453,9 +1564,7 @@ impl DefWithBody {
let (body, source_map) = db.body_with_source_map(self.into());
for (_, def_map) in body.blocks(db.upcast()) {
- for diag in def_map.diagnostics() {
- emit_def_diagnostic(db, acc, diag);
- }
+ Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc);
}
for diag in source_map.diagnostics() {
@@ -1509,10 +1618,10 @@ impl DefWithBody {
&hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => {
- source_map.field_syntax(expr).map(Either::Left)
+ source_map.field_syntax(expr).map(AstPtr::wrap_left)
}
ExprOrPatId::PatId(pat) => {
- source_map.pat_field_syntax(pat).map(Either::Right)
+ source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
}
};
acc.push(NoSuchField { field: expr_or_pat, private }.into())
@@ -1530,8 +1639,8 @@ impl DefWithBody {
}
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@@ -1609,12 +1718,17 @@ impl DefWithBody {
found,
} => {
let expr_or_pat = match pat {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => source_map
- .pat_syntax(pat)
- .expect("unexpected synthetic")
- .map(|it| it.unwrap_left())
- .map(Either::Right),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => {
+ let InFile { file_id, value } =
+ source_map.pat_syntax(pat).expect("unexpected synthetic");
+
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
+ continue;
+ };
+ InFile { file_id, value: ptr }
+ }
};
acc.push(
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
@@ -1628,11 +1742,15 @@ impl DefWithBody {
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
};
let expr_or_pat = match expr_or_pat {
- Ok(Either::Left(expr)) => Either::Left(expr),
- Ok(Either::Right(InFile { file_id, value: Either::Left(pat) })) => {
- Either::Right(InFile { file_id, value: pat })
+ Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left),
+ Ok(Either::Right(InFile { file_id, value: pat })) => {
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else {
+ continue;
+ };
+ InFile { file_id, value: ptr }
}
- Ok(Either::Right(_)) | Err(SyntheticSyntax) => continue,
+ Err(SyntheticSyntax) => continue,
};
acc.push(
@@ -1667,10 +1785,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@@ -1697,9 +1812,20 @@ impl DefWithBody {
// Skip synthetic bindings
continue;
}
- let need_mut = &mol[local];
+ let mut need_mut = &mol[local];
+ if body[binding_id].name.as_str() == Some("self")
+ && need_mut == &mir::MutabilityReason::Unused
+ {
+ need_mut = &mir::MutabilityReason::Not;
+ }
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
+ (mir::MutabilityReason::Unused, _) => {
+ let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
+ if !should_ignore {
+ acc.push(UnusedVariable { local }.into())
+ }
+ }
(mir::MutabilityReason::Mut { .. }, true)
| (mir::MutabilityReason::Not, false) => (),
(mir::MutabilityReason::Mut { spans }, false) => {
@@ -1710,10 +1836,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@@ -1752,18 +1875,18 @@ impl DefWithBody {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) =
- &source_ptr.value.to_node(&root)
+ source_ptr.value.to_node(&root)
{
if record_expr.record_expr_field_list().is_some() {
+ let field_list_parent_path =
+ record_expr.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
- field_list_parent: Either::Left(AstPtr::new(
+ field_list_parent: AstPtr::new(&Either::Left(
record_expr,
)),
- field_list_parent_path: record_expr
- .path()
- .map(|path| AstPtr::new(&path)),
+ field_list_parent_path,
missed_fields,
}
.into(),
@@ -1775,24 +1898,24 @@ impl DefWithBody {
},
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => {
- if let Some(expr) = source_ptr.value.as_ref().left() {
+ if let Some(ptr) = source_ptr.value.clone().cast::<ast::RecordPat>()
+ {
let root = source_ptr.file_syntax(db.upcast());
- if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
- if record_pat.record_pat_field_list().is_some() {
- acc.push(
- MissingFields {
- file: source_ptr.file_id,
- field_list_parent: Either::Right(AstPtr::new(
- &record_pat,
- )),
- field_list_parent_path: record_pat
- .path()
- .map(|path| AstPtr::new(&path)),
- missed_fields,
- }
- .into(),
- )
- }
+ let record_pat = ptr.to_node(&root);
+ if record_pat.record_pat_field_list().is_some() {
+ let field_list_parent_path =
+ record_pat.path().map(|path| AstPtr::new(&path));
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&Either::Right(
+ record_pat,
+ )),
+ field_list_parent_path,
+ missed_fields,
+ }
+ .into(),
+ )
}
}
}
@@ -1818,17 +1941,20 @@ impl DefWithBody {
if let ast::Expr::MatchExpr(match_expr) =
&source_ptr.value.to_node(&root)
{
- if let Some(scrut_expr) = match_expr.expr() {
- acc.push(
- MissingMatchArms {
- scrutinee_expr: InFile::new(
- source_ptr.file_id,
- AstPtr::new(&scrut_expr),
- ),
- uncovered_patterns,
- }
- .into(),
- );
+ match match_expr.expr() {
+ Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
+ acc.push(
+ MissingMatchArms {
+ scrutinee_expr: InFile::new(
+ source_ptr.file_id,
+ AstPtr::new(&scrut_expr),
+ ),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ _ => {}
}
}
}
@@ -1960,6 +2086,17 @@ impl Function {
db.function_data(self.id).attrs.is_test()
}
+ /// is this a `fn main` or a function with an `export_name` of `main`?
+ pub fn is_main(self, db: &dyn HirDatabase) -> bool {
+ if !self.module(db).is_crate_root() {
+ return false;
+ }
+ let data = db.function_data(self.id);
+
+ data.name.to_smol_str() == "main"
+ || data.attrs.export_name().map(core::ops::Deref::deref) == Some("main")
+ }
+
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).attrs.is_ignore()
@@ -2926,10 +3063,10 @@ impl Local {
.map(|&definition| {
let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
let root = src.file_syntax(db.upcast());
- src.map(|ast| match ast {
- // Suspicious unwrap
- Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
- Either::Right(it) => Either::Right(it.to_node(&root)),
+ src.map(|ast| match ast.to_node(&root) {
+ Either::Left(ast::Pat::IdentPat(it)) => Either::Left(it),
+ Either::Left(_) => unreachable!("local with non ident-pattern"),
+ Either::Right(it) => Either::Right(it),
})
})
.map(move |source| LocalSource { local: self, source })
@@ -3371,13 +3508,46 @@ impl Impl {
db.impl_data(self.id).is_negative
}
+ pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
+ db.impl_data(self.id).is_unsafe
+ }
+
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.lookup(db.upcast()).container.into()
}
- pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
let src = self.source(db)?;
- src.file_id.as_builtin_derive_attr_node(db.upcast())
+
+ let macro_file = src.file_id.macro_file()?;
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let (derive_attr, derive_index) = match loc.kind {
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+ let module_id = self.id.lookup(db.upcast()).container;
+ (
+ db.crate_def_map(module_id.krate())[module_id.local_id]
+ .scope
+ .derive_macro_invoc(ast_id, derive_attr_index)?,
+ derive_index,
+ )
+ }
+ _ => return None,
+ };
+ let file_id = MacroFileId { macro_call_id: derive_attr };
+ let path = db
+ .parse_macro_expansion(file_id)
+ .value
+ .0
+ .syntax_node()
+ .children()
+ .nth(derive_index as usize)
+ .and_then(<ast::Attr as AstNode>::cast)
+ .and_then(|it| it.path())?;
+ Some(InMacroFile { file_id, value: path })
+ }
+
+ pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
+ check_orphan_rules(db, self.id)
}
}
@@ -3393,10 +3563,9 @@ impl TraitRef {
resolver: &Resolver,
trait_ref: hir_ty::TraitRef,
) -> TraitRef {
- let env = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let env = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
TraitRef { env, trait_ref }
}
@@ -3536,15 +3705,14 @@ impl Type {
resolver: &Resolver,
ty: Ty,
) -> Type {
- let environment = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
- Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ Type { env: TraitEnvironment::empty(krate), ty }
}
pub fn reference(inner: &Type, m: Mutability) -> Type {
@@ -3560,10 +3728,9 @@ impl Type {
fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db.upcast());
- let environment = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
@@ -4133,10 +4300,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
- let environment = scope.resolver().generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(krate.id)),
- |d| db.trait_environment(d),
- );
+ let environment = scope
+ .resolver()
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_method_candidates_dyn(
&canonical,
@@ -4190,10 +4357,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
- let environment = scope.resolver().generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(krate.id)),
- |d| db.trait_environment(d),
- );
+ let environment = scope
+ .resolver()
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_path_candidates(
&canonical,
@@ -4515,15 +4682,31 @@ impl Layout {
Some(self.0.largest_niche?.available(&*self.1))
}
- pub fn field_offset(&self, idx: usize) -> Option<u64> {
+ pub fn field_offset(&self, field: Field) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => Some(0),
layout::FieldsShape::Array { stride, count } => {
- let i = u64::try_from(idx).ok()?;
+ let i = u64::try_from(field.index()).ok()?;
(i < count).then_some((stride * i).bytes())
}
- layout::FieldsShape::Arbitrary { ref offsets, .. } => Some(offsets.get(idx)?.bytes()),
+ layout::FieldsShape::Arbitrary { ref offsets, .. } => {
+ Some(offsets.get(RustcFieldIdx(field.id))?.bytes())
+ }
+ }
+ }
+
+ pub fn tuple_field_offset(&self, field: usize) -> Option<u64> {
+ match self.0.fields {
+ layout::FieldsShape::Primitive => None,
+ layout::FieldsShape::Union(_) => Some(0),
+ layout::FieldsShape::Array { stride, count } => {
+ let i = u64::try_from(field).ok()?;
+ (i < count).then_some((stride * i).bytes())
+ }
+ layout::FieldsShape::Arbitrary { ref offsets, .. } => {
+ Some(offsets.get(RustcFieldIdx::new(field))?.bytes())
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index a42e0978b..a03ff2207 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -2,7 +2,11 @@
mod source_to_def;
-use std::{cell::RefCell, fmt, iter, mem, ops};
+use std::{
+ cell::RefCell,
+ fmt, iter, mem,
+ ops::{self, ControlFlow, Not},
+};
use base_db::{FileId, FileRange};
use either::Either;
@@ -13,16 +17,21 @@ use hir_def::{
nameres::MacroSubNs,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
- AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
+ AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+ InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
-use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
+use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
- ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
- match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
+ match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+ TextRange, TextSize,
};
use crate::{
@@ -35,7 +44,13 @@ use crate::{
TypeAlias, TypeParam, VariantDef,
};
-#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum DescendPreference {
+ SameText,
+ SameKind,
+ None,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
Def(ModuleDef),
@@ -114,11 +129,12 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
- expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
- // Rootnode to HirFileId cache
+ /// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
- // MacroCall to its expansion's HirFileId cache
- macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+ // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
+ /// MacroCall to its expansion's MacroFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -182,20 +198,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
- pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
- self.imp.resolve_method_call(call).map(Function::from)
- }
-
- /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
- pub fn resolve_method_call_field_fallback(
- &self,
- call: &ast::MethodCallExpr,
- ) -> Option<Either<Function, Field>> {
- self.imp
- .resolve_method_call_fallback(call)
- .map(|it| it.map_left(Function::from).map_right(Field::from))
- }
-
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
}
@@ -255,7 +257,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
- let node = self.parse_or_expand(file_id);
+ let node = self.parse_or_expand(file_id.into());
Some(node)
}
@@ -388,11 +390,72 @@ impl<'db> SemanticsImpl<'db> {
)
}
+ pub fn as_format_args_parts(
+ &self,
+ string: &ast::String,
+ ) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
+ if let Some(quote) = string.open_quote_text_range() {
+ return self
+ .descend_into_macros(DescendPreference::SameText, string.syntax().clone())
+ .into_iter()
+ .find_map(|token| {
+ let string = ast::String::cast(token)?;
+ let literal =
+ string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
+ let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
+ let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
+ let format_args = self.wrap_node_infile(format_args);
+ let res = source_analyzer
+ .as_format_args_parts(self.db, format_args.as_ref())?
+ .map(|(range, res)| (range + quote.end(), res))
+ .collect();
+ Some(res)
+ });
+ }
+ None
+ }
+
+ pub fn check_for_format_args_template(
+ &self,
+ original_token: SyntaxToken,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ if let Some(original_string) = ast::String::cast(original_token.clone()) {
+ if let Some(quote) = original_string.open_quote_text_range() {
+ return self
+ .descend_into_macros(DescendPreference::SameText, original_token.clone())
+ .into_iter()
+ .find_map(|token| {
+ self.resolve_offset_in_format_args(
+ ast::String::cast(token)?,
+ offset - quote.end(),
+ )
+ })
+ .map(|(range, res)| (range + quote.end(), res));
+ }
+ }
+ None
+ }
+
+ fn resolve_offset_in_format_args(
+ &self,
+ string: ast::String,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ debug_assert!(offset <= string.syntax().text_range().len());
+ let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
+ let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
+ let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
+ let format_args = self.wrap_node_infile(format_args);
+ source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
+ }
+
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
let mut res = smallvec![];
let tokens = (|| {
+ // FIXME: the trivia skipping should not be necessary
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
Some((first, last))
@@ -403,24 +466,28 @@ impl<'db> SemanticsImpl<'db> {
};
if first == last {
- self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
- if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ // node is just the token, so descend the token
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value
+ .parent_ancestors()
+ .take_while(|it| it.text_range() == value.text_range())
+ .find_map(N::cast)
+ {
res.push(node)
}
- false
+ ControlFlow::Continue(())
});
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(first, 0.into(), &mut |token| {
+ self.descend_into_macros_impl(first, &mut |token| {
scratch.push(token);
- false
+ ControlFlow::Continue(())
});
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
- 0.into(),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@@ -437,7 +504,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
- false
+ ControlFlow::Continue(())
},
);
}
@@ -449,32 +516,42 @@ impl<'db> SemanticsImpl<'db> {
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
- offset: TextSize,
- ) -> SmallVec<[SyntaxToken; 1]> {
- let mut res = smallvec![];
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
- res.push(value);
- false
- });
- res
- }
-
- /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
- ///
- /// Returns the original non descended token if none of the mapped counterparts have the same text.
- pub fn descend_into_macros_with_same_text(
- &self,
- token: SyntaxToken,
- offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
- let text = token.text();
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = smallvec![];
- self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if value.text() == text {
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
+ if is_a_match {
res.push(value);
}
- false
+ ControlFlow::Continue(())
});
if res.is_empty() {
res.push(token);
@@ -482,44 +559,46 @@ impl<'db> SemanticsImpl<'db> {
res
}
- pub fn descend_into_macros_with_kind_preference(
+ pub fn descend_into_macros_single(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
- offset: TextSize,
) -> SyntaxToken {
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
- kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
- node.parent().map_or(kind, |it| it.kind())
- }
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
- let preferred_kind = fetch_kind(&token);
- let mut res = None;
- self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if fetch_kind(&value) == preferred_kind {
- res = Some(value);
- true
- } else {
- if let None = res {
- res = Some(value)
- }
- false
- }
- });
- res.unwrap_or(token)
- }
-
- /// Descend the token into its macro call if it is part of one, returning the token in the
- /// expansion that it is associated with. If `offset` points into the token's range, it will
- /// be considered for the mapping in case of inline format args.
- pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = token.clone();
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
res = value;
- true
+ if is_a_match {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
});
res
}
@@ -527,177 +606,204 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
- // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
- // mapping, specifically for node downmapping
- offset: TextSize,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = profile::span("descend_into_macros");
- let relative_token_offset = token.text_range().start().checked_sub(offset);
- let parent = match token.parent() {
+ let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(it) => it,
None => return,
};
- let sa = match self.analyze_no_infer(&parent) {
- Some(it) => it,
- None => return,
+
+ let span = match sa.file_id.file_id() {
+ Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ None => {
+ stdx::never!();
+ return;
+ }
};
- let def_map = sa.resolver.def_map();
- let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
+ let def_map = sa.resolver.def_map();
- let mut process_expansion_for_token =
- |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
- let expansion_info = cache
- .entry(macro_file)
- .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
- .as_ref()?;
+ let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- {
- let InFile { file_id, value } = expansion_info.expanded();
- self.cache(value, file_id);
- }
+ {
+ let InMacroFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id.into());
+ }
- let mapped_tokens = expansion_info.map_token_down(
- self.db.upcast(),
- item,
- token,
- relative_token_offset,
- )?;
- let len = stack.len();
-
- // requeue the tokens we got from mapping our current token down
- stack.extend(mapped_tokens);
- // if the length changed we have found a mapping for the token
- (stack.len() != len).then_some(())
- };
+ let InMacroFile { file_id, value: mapped_tokens } =
+ expansion_info.map_range_down(span)?;
+ let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
- // Remap the next token in the queue into a macro call its in, if it is not being remapped
- // either due to not being in a macro-call or because its unused push it into the result vec,
- // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
- while let Some(token) = stack.pop() {
- self.db.unwind_if_cancelled();
- let was_not_remapped = (|| {
- // First expand into attribute invocations
- let containing_attribute_macro_call = self.with_ctx(|ctx| {
- token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
- if item.attrs().next().is_none() {
- // Don't force populate the dyn cache for items that don't have an attribute anyways
- return None;
- }
- Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
- })
- });
- if let Some((call_id, item)) = containing_attribute_macro_call {
- let file_id = call_id.as_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- Some(item),
- token.as_ref(),
- );
- }
+ // if the length changed we have found a mapping for the token
+ let res = mapped_tokens.is_empty().not().then_some(());
+ // requeue the tokens we got from mapping our current token down
+ stack.push((HirFileId::from(file_id), mapped_tokens));
+ res
+ };
- // Then check for token trees, that means we are either in a function-like macro or
- // secondary attribute inputs
- let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
- let parent = tt.syntax().parent()?;
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
+
+ while let Some((file_id, mut tokens)) = stack.pop() {
+ while let Some(token) = tokens.pop() {
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ return None;
+ }
+ Some((
+ ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
+ item,
+ ))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_macro_file();
+ let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
+ hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
+ invoc_attr_index.ast_index()
+ }
+ _ => 0,
+ };
+ // FIXME: here, the attribute's text range is used to strip away all
+ // entries from the start of the attribute "list" up to the invoking
+ // attribute. But in
+ // ```
+ // mod foo {
+ // #![inner]
+ // }
+ // ```
+ // we don't wanna strip away stuff in the `mod foo {` range, that is
+ // here, if the id corresponds to an inner attribute, we must strip all
+ // text ranges of the outer ones, and then all of the inner ones up
+ // to the invoking attribute so that the text in between is ignored.
+ let text_range = item.syntax().text_range();
+ let start = collect_attrs(&item)
+ .nth(attr_id)
+ .map(|attr| match attr.1 {
+ Either::Left(it) => it.syntax().text_range().start(),
+ Either::Right(it) => it.syntax().text_range().start(),
+ })
+ .unwrap_or_else(|| text_range.start());
+ let text_range = TextRange::new(start, text_range.end());
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ return process_expansion_for_token(&mut stack, file_id);
+ }
- if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
- }
- if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
- }
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
- if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
- let mcall = token.with_value(macro_call);
- let file_id = match mcache.get(&mcall) {
- Some(&it) => it,
- None => {
- let it = sa.expand(self.db, mcall.as_ref())?;
- mcache.insert(mcall, it);
- it
- }
- };
- process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
- } else if let Some(meta) = ast::Meta::cast(parent) {
- // attribute we failed expansion for earlier, this might be a derive invocation
- // or derive helper attribute
- let attr = meta.parent_attr()?;
-
- let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
- // this might be a derive, or a derive helper on an ADT
- let derive_call = self.with_ctx(|ctx| {
- // so try downmapping the token into the pseudo derive expansion
- // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
- ctx.attr_to_derive_macro_call(
- token.with_value(&adt),
- token.with_value(attr.clone()),
- )
- .map(|(_, call_id, _)| call_id)
- });
-
- match derive_call {
- Some(call_id) => {
- // resolved to a derive
- let file_id = call_id.as_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- Some(adt.into()),
- token.as_ref(),
- );
+ if tt.left_delimiter_token().map_or(false, |it| it == token) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
+ InFile::new(file_id, macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
}
- None => Some(adt),
- }
- } else {
- // Otherwise this could be a derive helper on a variant or field
- if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ };
+ let text_range = tt.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ process_expansion_for_token(&mut stack, file_id)
+ } else if let Some(meta) = ast::Meta::cast(parent) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
{
- field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
- } else if let Some(field) =
- attr.syntax().parent().and_then(ast::TupleField::cast)
- {
- field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
- } else if let Some(variant) =
- attr.syntax().parent().and_then(ast::Variant::cast)
- {
- variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ InFile::new(file_id, &adt),
+ InFile::new(file_id, attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_macro_file();
+ let text_range = attr.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ return process_expansion_for_token(&mut stack, file_id);
+ }
+ None => Some(adt),
+ }
} else {
- None
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
+ return None;
}
- }?;
- if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
- return None;
- }
- // Not an attribute, nor a derive, so it's either a builtin or a derive helper
- // Try to resolve to a derive helper and downmap
- let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
- let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
- let helpers =
- def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
- let item = Some(adt.into());
- let mut res = None;
- for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
- res = res.or(process_expansion_for_token(
- &mut stack,
- derive.as_file(),
- item.clone(),
- token.as_ref(),
- ));
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name =
+ attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(file_id).ast_id(&adt);
+ let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
+ let mut res = None;
+ for (.., derive) in
+ helpers.iter().filter(|(helper, ..)| *helper == attr_name)
+ {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_macro_file(),
+ ));
+ }
+ res
+ } else {
+ None
}
- res
- } else {
- None
- }
- })()
- .is_none();
+ })()
+ .is_none();
- if was_not_remapped && f(token) {
- break;
+ if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
+ break;
+ }
}
}
}
@@ -712,7 +818,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
- .map(move |token| self.descend_into_macros(token, offset))
+ .map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
@@ -737,14 +843,16 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
+ .filter(|(_, ctx)| ctx.is_root())
+ .map(TupleExt::head)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
- |InFile { file_id, value }| {
- self.cache(find_root(value.syntax()), file_id);
+ |InRealFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id.into());
value
},
)
@@ -755,8 +863,8 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
- |InFile { file_id, value }| {
- self.cache(find_root(&value), file_id);
+ |InRealFile { file_id, value }| {
+ self.cache(find_root(&value), file_id.into());
value
},
)
@@ -787,7 +895,7 @@ impl<'db> SemanticsImpl<'db> {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
self.cache(value.clone(), file_id);
- file_id.call_node(db)
+ Some(file_id.macro_file()?.call_node(db))
}
}
})
@@ -840,10 +948,10 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
- let ty = hir_ty::TyLoweringContext::new(
+ let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
self.db,
&analyze.resolver,
- analyze.resolver.module().into(),
+ analyze.resolver.type_owner(),
)
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
@@ -851,9 +959,9 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?;
- let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
- let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
- let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let span_map = self.db.span_map(analyze.file_id);
+ let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
+ let hir_path = Path::from_src(&ctx, path.clone())?;
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
TypeNs::TraitId(id) => Some(Trait { id }),
_ => None,
@@ -937,14 +1045,15 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
- fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
- fn resolve_method_call_fallback(
+ /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
+ pub fn resolve_method_call_fallback(
&self,
call: &ast::MethodCallExpr,
- ) -> Option<Either<FunctionId, FieldId>> {
+ ) -> Option<Either<Function, Field>> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
@@ -976,6 +1085,13 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_field(self.db, field)
}
+ pub fn resolve_field_fallback(
+ &self,
+ field: &ast::FieldExpr,
+ ) -> Option<Either<Field, Function>> {
+ self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
+ }
+
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
@@ -1037,7 +1153,7 @@ impl<'db> SemanticsImpl<'db> {
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
- let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
+ let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
f(&mut ctx)
}
@@ -1187,7 +1303,7 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
- let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let func = self.resolve_method_call(method_call_expr)?;
let res = match func.self_param(self.db)?.access(self.db) {
Access::Shared | Access::Exclusive => true,
Access::Owned => false,
@@ -1451,7 +1567,7 @@ impl SemanticsScope<'_> {
/// necessary a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
- let path = Path::from_src(path.clone(), &ctx)?;
+ let path = Path::from_src(&ctx, path.clone())?;
resolve_hir_path(self.db, &self.resolver, &path)
}
@@ -1478,6 +1594,10 @@ impl SemanticsScope<'_> {
pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
self.resolver.extern_crate_decls_in_scope(self.db.upcast())
}
+
+ pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
+ self.resolver.impl_def() == other.resolver.impl_def()
+ }
}
#[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index aabda3655..df8c1e904 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -97,7 +97,7 @@ use hir_def::{
FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::{impl_from, never};
@@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap
pub(super) struct SourceToDefCtx<'a, 'b> {
pub(super) db: &'b dyn HirDatabase,
- pub(super) cache: &'a mut SourceToDefCache,
+ pub(super) dynmap_cache: &'a mut SourceToDefCache,
}
impl SourceToDefCtx<'_, '_> {
@@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db;
- self.cache
+ self.dynmap_cache
.entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id))
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index f29fb1edf..d05118bbc 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -26,11 +26,10 @@ use hir_def::{
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
- hygiene::Hygiene,
mod_path::path,
name,
name::{AsName, Name},
- HirFileId, InFile,
+ HirFileId, InFile, MacroFileId, MacroFileIdExt,
};
use hir_ty::{
diagnostics::{
@@ -236,9 +235,9 @@ impl SourceAnalyzer {
_db: &dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<BindingMode> {
- let binding_id = self.binding_id_of_pat(pat)?;
+ let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer.as_ref()?;
- infer.binding_modes.get(binding_id).map(|bm| match bm {
+ infer.binding_modes.get(id).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -281,25 +280,49 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
- Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
+ Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
}
pub(crate) fn resolve_method_call_fallback(
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<Either<FunctionId, FieldId>> {
+ ) -> Option<Either<Function, Field>> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let inference_result = self.infer.as_ref()?;
match inference_result.method_resolution(expr_id) {
- Some((f_in_trait, substs)) => {
- Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
- }
- None => inference_result.field_resolution(expr_id).map(Either::Right),
+ Some((f_in_trait, substs)) => Some(Either::Left(
+ self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(),
+ )),
+ None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right),
+ }
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_field_fallback(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Either<Field, Function>> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ let inference_result = self.infer.as_ref()?;
+ match inference_result.field_resolution(expr_id) {
+ Some(field) => Some(Either::Left(field.into())),
+ None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
+ Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into())
+ }),
}
}
@@ -418,15 +441,6 @@ impl SourceAnalyzer {
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
- pub(crate) fn resolve_field(
- &self,
- db: &dyn HirDatabase,
- field: &ast::FieldExpr,
- ) -> Option<Field> {
- let expr_id = self.expr_id(db, &field.clone().into())?;
- self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
- }
-
pub(crate) fn resolve_record_field(
&self,
db: &dyn HirDatabase,
@@ -484,7 +498,7 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
- let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|(it, _)| it.into())
@@ -596,9 +610,8 @@ impl SourceAnalyzer {
}
// This must be a normal source file rather than macro file.
- let hygiene = Hygiene::new(db.upcast(), self.file_id);
- let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
- let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
+ let hir_path = Path::from_src(&ctx, path.clone())?;
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@@ -755,14 +768,15 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
- ) -> Option<HirFileId> {
+ ) -> Option<MacroFileId> {
let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?;
- Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ // why the 64?
+ Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
}
pub(crate) fn resolve_variant(
@@ -821,6 +835,52 @@ impl SourceAnalyzer {
false
}
+ pub(crate) fn resolve_offset_in_format_args(
+ &self,
+ db: &dyn HirDatabase,
+ format_args: InFile<&ast::FormatArgsExpr>,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ let implicits = self.body_source_map()?.implicit_format_args(format_args)?;
+ implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
+ (
+ *range,
+ resolve_hir_value_path(
+ db,
+ &self.resolver,
+ self.resolver.body_owner(),
+ &Path::from_known_path_with_no_generic(ModPath::from_segments(
+ PathKind::Plain,
+ Some(name.clone()),
+ )),
+ ),
+ )
+ })
+ }
+
+ pub(crate) fn as_format_args_parts<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ format_args: InFile<&ast::FormatArgsExpr>,
+ ) -> Option<impl Iterator<Item = (TextRange, Option<PathResolution>)> + 'a> {
+ Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map(
+ move |(range, name)| {
+ (
+ *range,
+ resolve_hir_value_path(
+ db,
+ &self.resolver,
+ self.resolver.body_owner(),
+ &Path::from_known_path_with_no_generic(ModPath::from_segments(
+ PathKind::Plain,
+ Some(name.clone()),
+ )),
+ ),
+ )
+ },
+ ))
+ }
+
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
@@ -888,17 +948,18 @@ fn scope_for_offset(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ let InFile { file_id, value } = source_map.expr_syntax(id).ok()?;
if from_file == file_id {
return Some((value.text_range(), scope));
}
// FIXME handle attribute expansion
- let source = iter::successors(file_id.call_node(db.upcast()), |it| {
- it.file_id.call_node(db.upcast())
- })
- .find(|it| it.file_id == from_file)
- .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ let source =
+ iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
+ Some(it.file_id.macro_file()?.call_node(db.upcast()))
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
Some((source.value.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
@@ -923,7 +984,7 @@ fn adjust(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let source = source_map.expr_syntax(*id).ok()?;
+ let source = source_map.expr_syntax(id).ok()?;
// FIXME: correctly handle macro expansion
if source.file_id != from_file {
return None;
@@ -979,8 +1040,9 @@ fn resolve_hir_path_(
let types = || {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => {
- let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
- .lower_ty_ext(type_ref);
+ let (_, res) =
+ TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+ .lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
@@ -1039,24 +1101,7 @@ fn resolve_hir_path_(
};
let body_owner = resolver.body_owner();
- let values = || {
- resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
- let res = match val {
- ValueNs::LocalBinding(binding_id) => {
- let var = Local { parent: body_owner?, binding_id };
- PathResolution::Local(var)
- }
- ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
- ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
- ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
- ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
- ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
- ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
- ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
- };
- Some(res)
- })
- };
+ let values = || resolve_hir_value_path(db, resolver, body_owner, path);
let items = || {
resolver
@@ -1076,6 +1121,30 @@ fn resolve_hir_path_(
.or_else(macros)
}
+fn resolve_hir_value_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ body_owner: Option<DefWithBodyId>,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(binding_id) => {
+ let var = Local { parent: body_owner?, binding_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+}
+
/// Resolves a path where we know it is a qualifier of another path.
///
/// For example, if we have:
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index ca7874c36..a2a30edeb 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -9,7 +9,7 @@ use hir_def::{
};
use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase;
-use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr};
use crate::{Module, ModuleDef, Semantics};
@@ -23,6 +23,7 @@ pub struct FileSymbol {
pub loc: DeclarationLocation,
pub container_name: Option<SmolStr>,
pub is_alias: bool,
+ pub is_assoc: bool,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -32,7 +33,7 @@ pub struct DeclarationLocation {
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
- pub name_ptr: SyntaxNodePtr,
+ pub name_ptr: AstPtr<syntax::ast::Name>,
}
impl DeclarationLocation {
@@ -49,15 +50,6 @@ impl DeclarationLocation {
let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range(db.upcast())
}
-
- pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
- if let Some(file_id) = self.hir_file_id.file_id() {
- // fast path to prevent parsing
- return Some(FileRange { file_id, range: self.name_ptr.text_range() });
- }
- let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
- node.as_ref().original_file_range_opt(db.upcast())
- }
}
fn resolve_node(
@@ -130,34 +122,34 @@ impl<'a> SymbolCollector<'a> {
match module_def_id {
ModuleDefId::ModuleId(id) => self.push_module(id),
ModuleDefId::FunctionId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
- ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id),
+ ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
ModuleDefId::ConstId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::StaticId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::TraitId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_trait(id);
}
ModuleDefId::TraitAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::TypeAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::MacroId(id) => match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
},
// Don't index these.
ModuleDefId::BuiltinType(_) => {}
@@ -190,7 +182,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
- name_ptr: SyntaxNodePtr::new(name.syntax()),
+ name_ptr: AstPtr::new(&name),
};
self.symbols.push(FileSymbol {
@@ -199,6 +191,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
});
}
@@ -211,9 +204,9 @@ impl<'a> SymbolCollector<'a> {
for &id in id {
if id.module(self.db.upcast()) == module_id {
match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
}
}
}
@@ -275,13 +268,13 @@ impl<'a> SymbolCollector<'a> {
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
match assoc_item_id {
- AssocItemId::FunctionId(id) => self.push_decl(id),
- AssocItemId::ConstId(id) => self.push_decl(id),
- AssocItemId::TypeAliasId(id) => self.push_decl(id),
+ AssocItemId::FunctionId(id) => self.push_decl(id, true),
+ AssocItemId::ConstId(id) => self.push_decl(id, true),
+ AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
}
}
- fn push_decl<L>(&mut self, id: L)
+ fn push_decl<L>(&mut self, id: L, is_assoc: bool)
where
L: Lookup + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
@@ -294,7 +287,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ name_ptr: AstPtr::new(&name_node),
};
if let Some(attrs) = def.attrs(self.db) {
@@ -305,6 +298,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc,
});
}
}
@@ -315,6 +309,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc,
});
}
@@ -327,7 +322,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ name_ptr: AstPtr::new(&name_node),
};
let def = ModuleDef::Module(module_id.into());
@@ -340,6 +335,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc: false,
});
}
}
@@ -350,6 +346,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
}
}