author    Daniel Baumann <daniel.baumann@progress-linux.org>    2024-05-30 18:31:44 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>    2024-05-30 18:31:44 +0000
commit    c23a457e72abe608715ac76f076f47dc42af07a5 (patch)
tree      2772049aaf84b5c9d0ed12ec8d86812f7a7904b6 /src/tools/rust-analyzer/crates/hir
parent    Releasing progress-linux version 1.73.0+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.74.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir')
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/attrs.rs           | 260
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/diagnostics.rs     |  11
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/display.rs         |  68
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/lib.rs             | 133
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/semantics.rs       | 477
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs |   9
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/symbols.rs         |  40
7 files changed, 458 insertions, 540 deletions
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 0f2fb2c81..796490abd 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -1,38 +1,27 @@
//! Attributes & documentation for hir types.
use hir_def::{
- attr::{AttrsWithOwner, Documentation},
+ attr::AttrsWithOwner,
item_scope::ItemInNs,
- path::ModPath,
- resolver::HasResolver,
- AttrDefId, GenericParamId, ModuleDefId,
+ path::{ModPath, Path},
+ per_ns::Namespace,
+ resolver::{HasResolver, Resolver, TypeNs},
+ AssocItemId, AttrDefId, ModuleDefId,
};
-use hir_expand::hygiene::Hygiene;
+use hir_expand::{hygiene::Hygiene, name::Name};
use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
use crate::{
- Adt, AssocItem, Const, ConstParam, Enum, ExternCrateDecl, Field, Function, GenericParam, Impl,
- LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias,
- TypeParam, Union, Variant,
+ Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
+ Field, Function, GenericParam, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, Struct,
+ Trait, TraitAlias, TypeAlias, TypeParam, Union, Variant, VariantDef,
};
pub trait HasAttrs {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner;
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<Namespace>,
- ) -> Option<ModuleDef>;
-}
-
-#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
-pub enum Namespace {
- Types,
- Values,
- Macros,
+ #[doc(hidden)]
+ fn attr_id(self) -> AttrDefId;
}
macro_rules! impl_has_attrs {
@@ -42,13 +31,8 @@ macro_rules! impl_has_attrs {
let def = AttrDefId::$def_id(self.into());
db.attrs_with_owner(def)
}
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- let def = AttrDefId::$def_id(self.into());
- db.attrs(def).docs()
- }
- fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
- let def = AttrDefId::$def_id(self.into());
- resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
+ fn attr_id(self) -> AttrDefId {
+ AttrDefId::$def_id(self.into())
}
}
)*};
@@ -68,6 +52,7 @@ impl_has_attrs![
(Module, ModuleId),
(GenericParam, GenericParamId),
(Impl, ImplId),
+ (ExternCrateDecl, ExternCrateId),
];
macro_rules! impl_has_attrs_enum {
@@ -76,11 +61,8 @@ macro_rules! impl_has_attrs_enum {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
$enum::$variant(self).attrs(db)
}
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- $enum::$variant(self).docs(db)
- }
- fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
- $enum::$variant(self).resolve_doc_path(db, link, ns)
+ fn attr_id(self) -> AttrDefId {
+ $enum::$variant(self).attr_id()
}
}
)*};
@@ -97,70 +79,35 @@ impl HasAttrs for AssocItem {
AssocItem::TypeAlias(it) => it.attrs(db),
}
}
-
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- match self {
- AssocItem::Function(it) => it.docs(db),
- AssocItem::Const(it) => it.docs(db),
- AssocItem::TypeAlias(it) => it.docs(db),
- }
- }
-
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<Namespace>,
- ) -> Option<ModuleDef> {
+ fn attr_id(self) -> AttrDefId {
match self {
- AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
- AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
- AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
+ AssocItem::Function(it) => it.attr_id(),
+ AssocItem::Const(it) => it.attr_id(),
+ AssocItem::TypeAlias(it) => it.attr_id(),
}
}
}
-impl HasAttrs for ExternCrateDecl {
- fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
- let def = AttrDefId::ExternCrateId(self.into());
- db.attrs_with_owner(def)
- }
- fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
- let crate_docs = self.resolved_crate(db)?.root_module().attrs(db).docs().map(String::from);
- let def = AttrDefId::ExternCrateId(self.into());
- let decl_docs = db.attrs(def).docs().map(String::from);
- match (decl_docs, crate_docs) {
- (None, None) => None,
- (Some(decl_docs), None) => Some(decl_docs),
- (None, Some(crate_docs)) => Some(crate_docs),
- (Some(mut decl_docs), Some(crate_docs)) => {
- decl_docs.push('\n');
- decl_docs.push('\n');
- decl_docs += &crate_docs;
- Some(decl_docs)
- }
- }
- .map(Documentation::new)
- }
- fn resolve_doc_path(
- self,
- db: &dyn HirDatabase,
- link: &str,
- ns: Option<Namespace>,
- ) -> Option<ModuleDef> {
- let def = AttrDefId::ExternCrateId(self.into());
- resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
- }
+/// Resolves the item `link` points to in the scope of `def`.
+pub fn resolve_doc_path_on(
+ db: &dyn HirDatabase,
+ def: impl HasAttrs,
+ link: &str,
+ ns: Option<Namespace>,
+) -> Option<DocLinkDef> {
+ // AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
+ // AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()),
+
+ resolve_doc_path_on_(db, link, def.attr_id(), ns)
}
-/// Resolves the item `link` points to in the scope of `def`.
-fn resolve_doc_path(
+fn resolve_doc_path_on_(
db: &dyn HirDatabase,
- def: AttrDefId,
link: &str,
+ attr_id: AttrDefId,
ns: Option<Namespace>,
-) -> Option<ModuleDefId> {
- let resolver = match def {
+) -> Option<DocLinkDef> {
+ let resolver = match attr_id {
AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
AttrDefId::AdtId(it) => it.resolver(db.upcast()),
@@ -176,16 +123,110 @@ fn resolve_doc_path(
AttrDefId::UseId(it) => it.resolver(db.upcast()),
AttrDefId::MacroId(it) => it.resolver(db.upcast()),
AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
- AttrDefId::GenericParamId(it) => match it {
- GenericParamId::TypeParamId(it) => it.parent(),
- GenericParamId::ConstParamId(it) => it.parent(),
- GenericParamId::LifetimeParamId(it) => it.parent,
+ AttrDefId::GenericParamId(_) => return None,
+ };
+
+ let mut modpath = modpath_from_str(db, link)?;
+
+ let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
+ if resolved.is_none() {
+ let last_name = modpath.pop_segment()?;
+ resolve_assoc_or_field(db, resolver, modpath, last_name, ns)
+ } else {
+ let def = match ns {
+ Some(Namespace::Types) => resolved.take_types(),
+ Some(Namespace::Values) => resolved.take_values(),
+ Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
+ None => resolved.iter_items().next().map(|(it, _)| match it {
+ ItemInNs::Types(it) => it,
+ ItemInNs::Values(it) => it,
+ ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
+ }),
+ };
+ Some(DocLinkDef::ModuleDef(def?.into()))
+ }
+}
+
+fn resolve_assoc_or_field(
+ db: &dyn HirDatabase,
+ resolver: Resolver,
+ path: ModPath,
+ name: Name,
+ ns: Option<Namespace>,
+) -> Option<DocLinkDef> {
+ let path = Path::from_known_path_with_no_generic(path);
+ // FIXME: This does not handle `Self` on trait definitions, which we should resolve to the
+ // trait itself.
+ let base_def = resolver.resolve_path_in_type_ns_fully(db.upcast(), &path)?;
+
+ let ty = match base_def {
+ TypeNs::SelfType(id) => Impl::from(id).self_ty(db),
+ TypeNs::GenericParam(_) => {
+ // Even if this generic parameter has some trait bounds, rustdoc doesn't
+ // resolve `name` to trait items.
+ return None;
+ }
+ TypeNs::AdtId(id) | TypeNs::AdtSelfType(id) => Adt::from(id).ty(db),
+ TypeNs::EnumVariantId(id) => {
+ // Enum variants don't have path candidates.
+ let variant = Variant::from(id);
+ return resolve_field(db, variant.into(), name, ns);
+ }
+ TypeNs::TypeAliasId(id) => {
+ let alias = TypeAlias::from(id);
+ if alias.as_assoc_item(db).is_some() {
+ // We don't normalize associated type aliases, so we have nothing to
+ // resolve `name` to.
+ return None;
+ }
+ alias.ty(db)
+ }
+ TypeNs::BuiltinType(id) => BuiltinType::from(id).ty(db),
+ TypeNs::TraitId(id) => {
+ // Doc paths in this context may only resolve to an item of this trait
+ // (i.e. no items of its supertraits), so we need to handle them here
+ // independently of others.
+ return db.trait_data(id).items.iter().find(|it| it.0 == name).map(|(_, assoc_id)| {
+ let def = match *assoc_id {
+ AssocItemId::FunctionId(it) => ModuleDef::Function(it.into()),
+ AssocItemId::ConstId(it) => ModuleDef::Const(it.into()),
+ AssocItemId::TypeAliasId(it) => ModuleDef::TypeAlias(it.into()),
+ };
+ DocLinkDef::ModuleDef(def)
+ });
+ }
+ TypeNs::TraitAliasId(_) => {
+ // XXX: Do these get resolved?
+ return None;
}
- .resolver(db.upcast()),
};
- let modpath = {
- // FIXME: this is not how we should get a mod path here
+ // FIXME: Resolve associated items here, e.g. `Option::map`. Note that associated items take
+ // precedence over fields.
+
+ let variant_def = match ty.as_adt()? {
+ Adt::Struct(it) => it.into(),
+ Adt::Union(it) => it.into(),
+ Adt::Enum(_) => return None,
+ };
+ resolve_field(db, variant_def, name, ns)
+}
+
+fn resolve_field(
+ db: &dyn HirDatabase,
+ def: VariantDef,
+ name: Name,
+ ns: Option<Namespace>,
+) -> Option<DocLinkDef> {
+ if let Some(Namespace::Types | Namespace::Macros) = ns {
+ return None;
+ }
+ def.fields(db).into_iter().find(|f| f.name(db) == name).map(DocLinkDef::Field)
+}
+
+fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
+ // FIXME: this is not how we should get a mod path here.
+ let try_get_modpath = |link: &str| {
let ast_path = ast::SourceFile::parse(&format!("type T = {link};"))
.syntax_node()
.descendants()
@@ -193,23 +234,20 @@ fn resolve_doc_path(
if ast_path.syntax().text() != link {
return None;
}
- ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())?
+ ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
};
- let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
- let resolved = if resolved.is_none() {
- resolver.resolve_module_path_in_trait_assoc_items(db.upcast(), &modpath)?
- } else {
- resolved
- };
- match ns {
- Some(Namespace::Types) => resolved.take_types(),
- Some(Namespace::Values) => resolved.take_values(),
- Some(Namespace::Macros) => resolved.take_macros().map(ModuleDefId::MacroId),
- None => resolved.iter_items().next().map(|it| match it {
- ItemInNs::Types(it) => it,
- ItemInNs::Values(it) => it,
- ItemInNs::Macros(it) => ModuleDefId::MacroId(it),
- }),
+ let full = try_get_modpath(link);
+ if full.is_some() {
+ return full;
}
+
+ // Tuple field names cannot be a part of `ModPath` usually, but rustdoc can
+ // resolve doc paths like `TupleStruct::0`.
+ // FIXME: Find a better way to handle these.
+ let (base, maybe_tuple_field) = link.rsplit_once("::")?;
+ let tuple_field = Name::new_tuple_field(maybe_tuple_field.parse().ok()?);
+ let mut modpath = try_get_modpath(base)?;
+ modpath.push_segment(tuple_field);
+ Some(modpath)
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 80c3bcdca..479138b67 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -43,6 +43,7 @@ diagnostics![
MacroExpansionParseError,
MalformedDerive,
MismatchedArgCount,
+ MismatchedTupleStructPatArgCount,
MissingFields,
MissingMatchArms,
MissingUnsafe,
@@ -172,7 +173,8 @@ pub struct MalformedDerive {
#[derive(Debug)]
pub struct NoSuchField {
- pub field: InFile<AstPtr<ast::RecordExprField>>,
+ pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
+ pub private: bool,
}
#[derive(Debug)]
@@ -183,6 +185,13 @@ pub struct PrivateAssocItem {
}
#[derive(Debug)]
+pub struct MismatchedTupleStructPatArgCount {
+ pub expr_or_pat: InFile<Either<AstPtr<ast::Expr>, AstPtr<ast::Pat>>>,
+ pub expected: usize,
+ pub found: usize,
+}
+
+#[derive(Debug)]
pub struct ExpectedFunction {
pub call: InFile<AstPtr<ast::Expr>>,
pub found: Type,
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index 9dfb98e45..ac171026d 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -8,7 +8,6 @@ use hir_def::{
type_ref::{TypeBound, TypeRef},
AdtId, GenericDefId,
};
-use hir_expand::name;
use hir_ty::{
display::{
write_bounds_like_dyn_trait_with_prefix, write_visibility, HirDisplay, HirDisplayError,
@@ -19,8 +18,9 @@ use hir_ty::{
use crate::{
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
- Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct,
- Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
+ Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, SelfParam,
+ Static, Struct, Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam,
+ Union, Variant,
};
impl HirDisplay for Function {
@@ -57,37 +57,21 @@ impl HirDisplay for Function {
f.write_char('(')?;
- let write_self_param = |ty: &TypeRef, f: &mut HirFormatter<'_>| match ty {
- TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
- TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
- {
- f.write_char('&')?;
- if let Some(lifetime) = lifetime {
- write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
- }
- if let hir_def::type_ref::Mutability::Mut = mut_ {
- f.write_str("mut ")?;
- }
- f.write_str("self")
- }
- _ => {
- f.write_str("self: ")?;
- ty.hir_fmt(f)
- }
- };
-
let mut first = true;
+ let mut skip_self = 0;
+ if let Some(self_param) = self.self_param(db) {
+ self_param.hir_fmt(f)?;
+ first = false;
+ skip_self = 1;
+ }
+
// FIXME: Use resolved `param.ty` once we no longer discard lifetimes
- for (type_ref, param) in data.params.iter().zip(self.assoc_fn_params(db)) {
+ for (type_ref, param) in data.params.iter().zip(self.assoc_fn_params(db)).skip(skip_self) {
let local = param.as_local(db).map(|it| it.name(db));
if !first {
f.write_str(", ")?;
} else {
first = false;
- if local == Some(name!(self)) {
- write_self_param(type_ref, f)?;
- continue;
- }
}
match local {
Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?,
@@ -137,6 +121,31 @@ impl HirDisplay for Function {
}
}
+impl HirDisplay for SelfParam {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = f.db.function_data(self.func);
+ let param = data.params.first().unwrap();
+ match &**param {
+ TypeRef::Path(p) if p.is_self_type() => f.write_str("self"),
+ TypeRef::Reference(inner, lifetime, mut_) if matches!(&**inner, TypeRef::Path(p) if p.is_self_type()) =>
+ {
+ f.write_char('&')?;
+ if let Some(lifetime) = lifetime {
+ write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
+ }
+ if let hir_def::type_ref::Mutability::Mut = mut_ {
+ f.write_str("mut ")?;
+ }
+ f.write_str("self")
+ }
+ ty => {
+ f.write_str("self: ")?;
+ ty.hir_fmt(f)
+ }
+ }
+ }
+}
+
impl HirDisplay for Adt {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
@@ -357,6 +366,11 @@ fn write_generic_params(
delim(f)?;
write!(f, "const {}: ", name.display(f.db.upcast()))?;
c.ty.hir_fmt(f)?;
+
+ if let Some(default) = &c.default {
+ f.write_str(" = ")?;
+ write!(f, "{}", default.display(f.db.upcast()))?;
+ }
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index bf041b61f..b215ed38f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -63,12 +63,13 @@ use hir_ty::{
all_super_traits, autoderef,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
+ known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir},
primitive::UintTy,
traits::FnTrait,
- AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId,
+ AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, ValueTyDefId,
WhereClause,
@@ -87,13 +88,14 @@ use triomphe::Arc;
use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
- attrs::{HasAttrs, Namespace},
+ attrs::{resolve_doc_path_on, HasAttrs},
diagnostics::{
AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
- MacroExpansionParseError, MalformedDerive, MismatchedArgCount, MissingFields,
- MissingMatchArms, MissingUnsafe, MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem,
- PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
+ MacroExpansionParseError, MalformedDerive, MismatchedArgCount,
+ MismatchedTupleStructPatArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
+ MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
+ ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
UnresolvedProcMacro, UnusedMut,
@@ -114,13 +116,14 @@ pub use crate::{
pub use {
cfg::{CfgAtom, CfgExpr, CfgOptions},
hir_def::{
- attr::{builtin::AttributeTemplate, Attrs, AttrsWithOwner, Documentation},
+ attr::{builtin::AttributeTemplate, AttrSourceMap, Attrs, AttrsWithOwner},
data::adt::StructKind,
find_path::PrefixKind,
import_map,
lang_item::LangItem,
nameres::{DefMap, ModuleSource},
path::{ModPath, PathKind},
+ per_ns::Namespace,
type_ref::{Mutability, TypeRef},
visibility::Visibility,
// FIXME: This is here since some queries take it as input that are used
@@ -128,7 +131,7 @@ pub use {
{AdtId, ModuleDefId},
},
hir_expand::{
- attrs::Attr,
+ attrs::{Attr, AttrId},
name::{known, Name},
ExpandResult, HirFileId, InFile, MacroFile, Origin,
},
@@ -561,8 +564,8 @@ impl Module {
emit_def_diagnostic(db, acc, diag);
}
- for decl in self.declarations(db) {
- match decl {
+ for def in self.declarations(db) {
+ match def {
ModuleDef::Module(m) => {
// Only add diagnostics from inline modules
if def_map[m.id.local_id].origin.is_inline() {
@@ -573,7 +576,7 @@ impl Module {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
- acc.extend(decl.diagnostics(db))
+ acc.extend(def.diagnostics(db))
}
ModuleDef::Adt(adt) => {
match adt {
@@ -597,10 +600,10 @@ impl Module {
}
}
}
- acc.extend(decl.diagnostics(db))
+ acc.extend(def.diagnostics(db))
}
ModuleDef::Macro(m) => emit_macro_def_diagnostics(db, acc, m),
- _ => acc.extend(decl.diagnostics(db)),
+ _ => acc.extend(def.diagnostics(db)),
}
}
self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m));
@@ -719,20 +722,18 @@ fn emit_def_diagnostic_(
) {
match diag {
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
- let decl = declaration.to_node(db.upcast());
+ let decl = declaration.to_ptr(db.upcast());
acc.push(
UnresolvedModule {
- decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
+ decl: InFile::new(declaration.file_id, decl),
candidates: candidates.clone(),
}
.into(),
)
}
DefDiagnosticKind::UnresolvedExternCrate { ast } => {
- let item = ast.to_node(db.upcast());
- acc.push(
- UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
- );
+ let item = ast.to_ptr(db.upcast());
+ acc.push(UnresolvedExternCrate { decl: InFile::new(ast.file_id, item) }.into());
}
DefDiagnosticKind::UnresolvedImport { id, index } => {
@@ -747,14 +748,10 @@ fn emit_def_diagnostic_(
}
DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
- let item = ast.to_node(db.upcast());
+ let item = ast.to_ptr(db.upcast());
acc.push(
- InactiveCode {
- node: ast.with_value(SyntaxNodePtr::new(&item).into()),
- cfg: cfg.clone(),
- opts: opts.clone(),
- }
- .into(),
+ InactiveCode { node: ast.with_value(item), cfg: cfg.clone(), opts: opts.clone() }
+ .into(),
);
}
DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
@@ -1273,7 +1270,7 @@ impl Adt {
.fill(|x| {
let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
match x {
- ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Type => r.cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
}
})
@@ -1450,6 +1447,7 @@ impl DefWithBody {
}
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ db.unwind_if_cancelled();
let krate = self.module(db).id.krate();
let (body, source_map) = db.body_with_source_map(self.into());
@@ -1505,11 +1503,19 @@ impl DefWithBody {
let infer = db.infer(self.into());
let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
let expr_syntax = |expr| source_map.expr_syntax(expr).expect("unexpected synthetic");
+ let pat_syntax = |pat| source_map.pat_syntax(pat).expect("unexpected synthetic");
for d in &infer.diagnostics {
match d {
- &hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
- let field = source_map.field_syntax(expr);
- acc.push(NoSuchField { field }.into())
+ &hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
+ let expr_or_pat = match expr {
+ ExprOrPatId::ExprId(expr) => {
+ source_map.field_syntax(expr).map(Either::Left)
+ }
+ ExprOrPatId::PatId(pat) => {
+ source_map.pat_field_syntax(pat).map(Either::Right)
+ }
+ };
+ acc.push(NoSuchField { field: expr_or_pat, private }.into())
}
&hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
acc.push(
@@ -1525,10 +1531,7 @@ impl DefWithBody {
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => source_map
- .pat_syntax(pat)
- .expect("unexpected synthetic")
- .map(Either::Right),
+ ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
};
let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@@ -1600,6 +1603,23 @@ impl DefWithBody {
.into(),
)
}
+ &hir_ty::InferenceDiagnostic::MismatchedTupleStructPatArgCount {
+ pat,
+ expected,
+ found,
+ } => {
+ let expr_or_pat = match pat {
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
+ ExprOrPatId::PatId(pat) => source_map
+ .pat_syntax(pat)
+ .expect("unexpected synthetic")
+ .map(|it| it.unwrap_left())
+ .map(Either::Right),
+ };
+ acc.push(
+ MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
+ )
+ }
}
}
for (pat_or_expr, mismatch) in infer.type_mismatches() {
@@ -2096,14 +2116,6 @@ impl SelfParam {
.unwrap_or(Access::Owned)
}
- pub fn display(self, db: &dyn HirDatabase) -> &'static str {
- match self.access(db) {
- Access::Shared => "&self",
- Access::Exclusive => "&mut self",
- Access::Owned => "self",
- }
- }
-
pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
let InFile { file_id, value } = Function::from(self.func).source(db)?;
value
@@ -3142,12 +3154,8 @@ impl TypeParam {
}
pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
- let params = db.generic_defaults(self.id.parent());
- let local_idx = hir_ty::param_idx(db, self.id.into())?;
+ let ty = generic_arg_from_param(db, self.id.into())?;
let resolver = self.id.parent().resolver(db.upcast());
- let ty = params.get(local_idx)?.clone();
- let subst = TyBuilder::placeholder_subst(db, self.id.parent());
- let ty = ty.substitute(Interner, &subst);
match ty.data(Interner) {
GenericArgData::Ty(it) => {
Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
@@ -3209,6 +3217,19 @@ impl ConstParam {
pub fn ty(self, db: &dyn HirDatabase) -> Type {
Type::new(db, self.id.parent(), db.const_param_ty(self.id))
}
+
+ pub fn default(self, db: &dyn HirDatabase) -> Option<ast::ConstArg> {
+ let arg = generic_arg_from_param(db, self.id.into())?;
+ known_const_to_ast(arg.constant(Interner)?, db)
+ }
+}
+
+fn generic_arg_from_param(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<GenericArg> {
+ let params = db.generic_defaults(id.parent);
+ let local_idx = hir_ty::param_idx(db, id)?;
+ let ty = params.get(local_idx)?.clone();
+ let subst = TyBuilder::placeholder_subst(db, id.parent);
+ Some(ty.substitute(Interner, &subst))
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -3716,7 +3737,7 @@ impl Type {
.fill(|x| {
let r = it.next().unwrap();
match x {
- ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Type => r.cast(Interner),
ParamKind::Const(ty) => {
// FIXME: this code is not covered in tests.
unknown_const_as_generic(ty.clone())
@@ -3749,9 +3770,7 @@ impl Type {
.fill(|it| {
// FIXME: this code is not covered in tests.
match it {
- ParamKind::Type => {
- GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
- }
+ ParamKind::Type => args.next().unwrap().ty.clone().cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
}
})
@@ -4414,14 +4433,13 @@ impl Callable {
Other => CallableKind::Other,
}
}
- pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(ast::SelfParam, Type)> {
+ pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<(SelfParam, Type)> {
let func = match self.callee {
Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
_ => return None,
};
- let src = func.lookup(db.upcast()).source(db.upcast());
- let param_list = src.value.param_list()?;
- Some((param_list.self_param()?, self.ty.derived(self.sig.params()[0].clone())))
+ let func = Function { id: func };
+ Some((func.self_param(db)?, self.ty.derived(self.sig.params()[0].clone())))
}
pub fn n_params(&self) -> usize {
self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
@@ -4844,3 +4862,10 @@ pub enum ItemContainer {
ExternBlock(),
Crate(CrateId),
}
+
+/// Subset of `ide_db::Definition` that doc links can resolve to.
+pub enum DocLinkDef {
+ ModuleDef(ModuleDef),
+ Field(Field),
+ SelfType(Trait),
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index e99d2984c..a42e0978b 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -127,148 +127,24 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
}
}
+impl<'db, DB> ops::Deref for Semantics<'db, DB> {
+ type Target = SemanticsImpl<'db>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.imp
+ }
+}
+
impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn new(db: &DB) -> Semantics<'_, DB> {
let impl_ = SemanticsImpl::new(db);
Semantics { db, imp: impl_ }
}
- pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
- self.imp.parse(file_id)
- }
-
- pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
- self.imp.parse_or_expand(file_id)
- }
-
- pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
- self.imp.expand(macro_call)
- }
-
- /// If `item` has an attribute macro attached to it, expands it.
- pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
- self.imp.expand_attr_macro(item)
- }
-
- pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
- self.imp.expand_derive_as_pseudo_attr_macro(attr)
- }
-
- pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
- self.imp.resolve_derive_macro(derive)
- }
-
- pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
- self.imp.expand_derive_macro(derive)
- }
-
- pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
- self.imp.is_attr_macro_call(item)
- }
-
- pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
- self.imp.is_derive_annotated(item)
- }
-
- pub fn speculative_expand(
- &self,
- actual_macro_call: &ast::MacroCall,
- speculative_args: &ast::TokenTree,
- token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
- self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
- }
-
- pub fn speculative_expand_attr_macro(
- &self,
- actual_macro_call: &ast::Item,
- speculative_args: &ast::Item,
- token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
- self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
- }
-
- pub fn speculative_expand_derive_as_pseudo_attr_macro(
- &self,
- actual_macro_call: &ast::Attr,
- speculative_args: &ast::Attr,
- token_to_map: SyntaxToken,
- ) -> Option<(SyntaxNode, SyntaxToken)> {
- self.imp.speculative_expand_derive_as_pseudo_attr_macro(
- actual_macro_call,
- speculative_args,
- token_to_map,
- )
- }
-
- /// Descend the token into macrocalls to its first mapped counterpart.
- pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
- self.imp.descend_into_macros_single(token)
- }
-
- /// Descend the token into macrocalls to all its mapped counterparts.
- pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
- self.imp.descend_into_macros(token)
- }
-
- /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
- ///
- /// Returns the original non descended token if none of the mapped counterparts have the same text.
- pub fn descend_into_macros_with_same_text(
- &self,
- token: SyntaxToken,
- ) -> SmallVec<[SyntaxToken; 1]> {
- self.imp.descend_into_macros_with_same_text(token)
- }
-
- pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
- self.imp.descend_into_macros_with_kind_preference(token)
- }
-
- /// Maps a node down by mapping its first and last token down.
- pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
- self.imp.descend_node_into_attributes(node)
- }
-
- /// Search for a definition's source and cache its syntax tree
- pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
- where
- Def::Ast: AstNode,
- {
- self.imp.source(def)
- }
-
pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
self.imp.find_file(syntax_node).file_id
}
- /// Attempts to map the node out of macro expanded files returning the original file range.
- /// If upmapping is not possible, this will fall back to the range of the macro call of the
- /// macro file the node resides in.
- pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
- self.imp.original_range(node)
- }
-
- /// Attempts to map the node out of macro expanded files returning the original file range.
- pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
- self.imp.original_range_opt(node)
- }
-
- /// Attempts to map the node out of macro expanded files.
- /// This only work for attribute expansions, as other ones do not have nodes as input.
- pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
- self.imp.original_ast_node(node)
- }
- /// Attempts to map the node out of macro expanded files.
- /// This only work for attribute expansions, as other ones do not have nodes as input.
- pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
- self.imp.original_syntax_node(node)
- }
-
- pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
- self.imp.diagnostics_display_range(diagnostics)
- }
-
pub fn token_ancestors_with_macros(
&self,
token: SyntaxToken,
@@ -276,19 +152,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
}
- /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
- pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
- self.imp.ancestors_with_macros(node)
- }
-
- pub fn ancestors_at_offset_with_macros(
- &self,
- node: &SyntaxNode,
- offset: TextSize,
- ) -> impl Iterator<Item = SyntaxNode> + '_ {
- self.imp.ancestors_at_offset_with_macros(node, offset)
- }
-
/// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
/// search up until it is of the target AstNode type
pub fn find_node_at_offset_with_macros<N: AstNode>(
@@ -319,53 +182,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
- pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
- self.imp.resolve_lifetime_param(lifetime)
- }
-
- pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
- self.imp.resolve_label(lifetime)
- }
-
- pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
- self.imp.resolve_type(ty)
- }
-
- pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
- self.imp.resolve_trait(trait_)
- }
-
- pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
- self.imp.expr_adjustments(expr)
- }
-
- pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
- self.imp.type_of_expr(expr)
- }
-
- pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
- self.imp.type_of_pat(pat)
- }
-
- /// It also includes the changes that binding mode makes in the type. For example in
- /// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
- /// of this function is `&mut Option<T>`
- pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
- self.imp.type_of_binding_in_pat(pat)
- }
-
- pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
- self.imp.type_of_self(param)
- }
-
- pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
- self.imp.pattern_adjustments(pat)
- }
-
- pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
- self.imp.binding_mode_of_pat(pat)
- }
-
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.imp.resolve_method_call(call).map(Function::from)
}
@@ -400,61 +216,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_try_expr(try_expr).map(Function::from)
}
- pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
- self.imp.resolve_method_call_as_callable(call)
- }
-
- pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
- self.imp.resolve_field(field)
- }
-
- pub fn resolve_record_field(
- &self,
- field: &ast::RecordExprField,
- ) -> Option<(Field, Option<Local>, Type)> {
- self.imp.resolve_record_field(field)
- }
-
- pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
- self.imp.resolve_record_pat_field(field)
- }
-
- pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
- self.imp.resolve_macro_call(macro_call)
- }
-
- pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
- self.imp.is_unsafe_macro_call(macro_call)
- }
-
- pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
- self.imp.resolve_attr_macro_call(item)
- }
-
- pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
- self.imp.resolve_path(path)
- }
-
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
self.imp.resolve_variant(record_lit).map(VariantDef::from)
}
- pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
- self.imp.resolve_bind_pat_to_const(pat)
- }
-
- pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
- self.imp.record_literal_missing_fields(literal)
- }
-
- pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
- self.imp.record_pattern_missing_fields(pattern)
- }
-
- pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
- self.imp.to_def(src)
- }
-
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
self.imp.to_module_def(file).next()
}
@@ -462,39 +227,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.imp.to_module_def(file)
}
-
- pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
- self.imp.scope(node)
- }
-
- pub fn scope_at_offset(
- &self,
- node: &SyntaxNode,
- offset: TextSize,
- ) -> Option<SemanticsScope<'db>> {
- self.imp.scope_at_offset(node, offset)
- }
-
- pub fn assert_contains_node(&self, node: &SyntaxNode) {
- self.imp.assert_contains_node(node)
- }
-
- pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
- self.imp.is_unsafe_method_call(method_call_expr)
- }
-
- pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
- self.imp.is_unsafe_ref_expr(ref_expr)
- }
-
- pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
- self.imp.is_unsafe_ident_pat(ident_pat)
- }
-
- /// Returns `true` if the `node` is inside an `unsafe` context.
- pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
- self.imp.is_inside_unsafe(expr)
- }
}
impl<'db> SemanticsImpl<'db> {
@@ -508,32 +240,33 @@ impl<'db> SemanticsImpl<'db> {
}
}
- fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
let tree = self.db.parse(file_id).tree();
self.cache(tree.syntax().clone(), file_id.into());
tree
}
- fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
+ pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
let node = self.db.parse_or_expand(file_id);
self.cache(node.clone(), file_id);
node
}
- fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let node = self.parse_or_expand(file_id);
Some(node)
}
- fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ /// If `item` has an attribute macro attached to it, expands it.
+ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
Some(self.parse_or_expand(macro_call_id.as_file()))
}
- fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let src = self.wrap_node_infile(attr.clone());
let call_id = self.with_ctx(|ctx| {
@@ -542,7 +275,7 @@ impl<'db> SemanticsImpl<'db> {
Some(self.parse_or_expand(call_id.as_file()))
}
- fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
let calls = self.derive_macro_calls(attr)?;
self.with_ctx(|ctx| {
Some(
@@ -556,7 +289,7 @@ impl<'db> SemanticsImpl<'db> {
})
}
- fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
let res: Vec<_> = self
.derive_macro_calls(attr)?
.into_iter()
@@ -581,19 +314,21 @@ impl<'db> SemanticsImpl<'db> {
})
}
- fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
let file_id = self.find_file(adt.syntax()).file_id;
let adt = InFile::new(file_id, adt);
self.with_ctx(|ctx| ctx.has_derives(adt))
}
- fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id;
let src = InFile::new(file_id, item.clone());
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
}
- fn speculative_expand(
+ /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
+ /// expansion. `token_to_map` should be a token from the `speculative args` node.
+ pub fn speculative_expand(
&self,
actual_macro_call: &ast::MacroCall,
speculative_args: &ast::TokenTree,
@@ -606,7 +341,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
resolver
.resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
- .map(|it| macro_id_to_def_id(self.db.upcast(), it))
+ .map(|(it, _)| macro_id_to_def_id(self.db.upcast(), it))
})?;
hir_expand::db::expand_speculative(
self.db.upcast(),
@@ -616,7 +351,9 @@ impl<'db> SemanticsImpl<'db> {
)
}
- fn speculative_expand_attr(
+ /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
+ /// expansion. `token_to_map` should be a token from the `speculative args` node.
+ pub fn speculative_expand_attr_macro(
&self,
actual_macro_call: &ast::Item,
speculative_args: &ast::Item,
@@ -632,7 +369,7 @@ impl<'db> SemanticsImpl<'db> {
)
}
- fn speculative_expand_derive_as_pseudo_attr_macro(
+ pub fn speculative_expand_derive_as_pseudo_attr_macro(
&self,
actual_macro_call: &ast::Attr,
speculative_args: &ast::Attr,
@@ -651,8 +388,9 @@ impl<'db> SemanticsImpl<'db> {
)
}
- // This might not be the correct way to do this, but it works for now
- fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ /// Maps a node down by mapping its first and last token down.
+ pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ // This might not be the correct way to do this, but it works for now
let mut res = smallvec![];
let tokens = (|| {
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
@@ -665,7 +403,7 @@ impl<'db> SemanticsImpl<'db> {
};
if first == last {
- self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
if let Some(node) = value.parent_ancestors().find_map(N::cast) {
res.push(node)
}
@@ -674,7 +412,7 @@ impl<'db> SemanticsImpl<'db> {
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(first, &mut |token| {
+ self.descend_into_macros_impl(first, 0.into(), &mut |token| {
scratch.push(token);
false
});
@@ -682,6 +420,7 @@ impl<'db> SemanticsImpl<'db> {
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
+ 0.into(),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@@ -705,19 +444,33 @@ impl<'db> SemanticsImpl<'db> {
res
}
- fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ /// Descend the token into its macro call if it is part of one, returning the tokens in the
+ /// expansion that it is associated with. If `offset` points into the token's range, it will
+ /// be considered for the mapping in case of inline format args.
+ pub fn descend_into_macros(
+ &self,
+ token: SyntaxToken,
+ offset: TextSize,
+ ) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
- self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
res.push(value);
false
});
res
}
- fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
+ ///
+ /// Returns the original non descended token if none of the mapped counterparts have the same text.
+ pub fn descend_into_macros_with_same_text(
+ &self,
+ token: SyntaxToken,
+ offset: TextSize,
+ ) -> SmallVec<[SyntaxToken; 1]> {
let text = token.text();
let mut res = smallvec![];
- self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
if value.text() == text {
res.push(value);
}
@@ -729,7 +482,11 @@ impl<'db> SemanticsImpl<'db> {
res
}
- fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ pub fn descend_into_macros_with_kind_preference(
+ &self,
+ token: SyntaxToken,
+ offset: TextSize,
+ ) -> SyntaxToken {
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
@@ -741,7 +498,7 @@ impl<'db> SemanticsImpl<'db> {
};
let preferred_kind = fetch_kind(&token);
let mut res = None;
- self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
if fetch_kind(&value) == preferred_kind {
res = Some(value);
true
@@ -755,9 +512,12 @@ impl<'db> SemanticsImpl<'db> {
res.unwrap_or(token)
}
- fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ /// Descend the token into its macro call if it is part of one, returning the token in the
+ /// expansion that it is associated with. If `offset` points into the token's range, it will
+ /// be considered for the mapping in case of inline format args.
+ pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
let mut res = token.clone();
- self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
res = value;
true
});
@@ -767,9 +527,13 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
+ // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
+ // mapping, specifically for node downmapping
+ offset: TextSize,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
) {
let _p = profile::span("descend_into_macros");
+ let relative_token_offset = token.text_range().start().checked_sub(offset);
let parent = match token.parent() {
Some(it) => it,
None => return,
@@ -796,7 +560,12 @@ impl<'db> SemanticsImpl<'db> {
self.cache(value, file_id);
}
- let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let mapped_tokens = expansion_info.map_token_down(
+ self.db.upcast(),
+ item,
+ token,
+ relative_token_offset,
+ )?;
let len = stack.len();
// requeue the tokens we got from mapping our current token down
@@ -943,7 +712,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
- .map(move |token| self.descend_into_macros(token))
+ .map(move |token| self.descend_into_macros(token, offset))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
@@ -956,17 +725,23 @@ impl<'db> SemanticsImpl<'db> {
})
}
- fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ /// If upmapping is not possible, this will fall back to the range of the macro call of the
+ /// macro file the node resides in.
+ pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
let node = self.find_file(node);
node.original_file_range(self.db.upcast())
}
- fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
}
- fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ /// Attempts to map the node out of macro expanded files.
+ /// This only work for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
|InFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id);
@@ -975,7 +750,9 @@ impl<'db> SemanticsImpl<'db> {
)
}
- fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
+ /// Attempts to map the node out of macro expanded files.
+ /// This only work for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
|InFile { file_id, value }| {
@@ -985,7 +762,7 @@ impl<'db> SemanticsImpl<'db> {
)
}
- fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range(self.db.upcast())
@@ -998,7 +775,8 @@ impl<'db> SemanticsImpl<'db> {
token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
}
- fn ancestors_with_macros(
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ pub fn ancestors_with_macros(
&self,
node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
@@ -1016,7 +794,7 @@ impl<'db> SemanticsImpl<'db> {
.map(|it| it.value)
}
- fn ancestors_at_offset_with_macros(
+ pub fn ancestors_at_offset_with_macros(
&self,
node: &SyntaxNode,
offset: TextSize,
@@ -1026,7 +804,7 @@ impl<'db> SemanticsImpl<'db> {
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
}
- fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
let text = lifetime.text();
let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
@@ -1037,7 +815,7 @@ impl<'db> SemanticsImpl<'db> {
ToDef::to_def(self, src)
}
- fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
let text = lifetime.text();
let label = lifetime.syntax().ancestors().find_map(|syn| {
let label = match_ast! {
@@ -1059,7 +837,7 @@ impl<'db> SemanticsImpl<'db> {
ToDef::to_def(self, src)
}
- fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
let ty = hir_ty::TyLoweringContext::new(
@@ -1071,7 +849,7 @@ impl<'db> SemanticsImpl<'db> {
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
}
- fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?;
let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
@@ -1082,7 +860,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
- fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
+ pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjustment>> {
let mutability = |m| match m {
hir_ty::Mutability::Not => Mutability::Shared,
hir_ty::Mutability::Mut => Mutability::Mut,
@@ -1126,33 +904,36 @@ impl<'db> SemanticsImpl<'db> {
})
}
- fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
self.analyze(expr.syntax())?
.type_of_expr(self.db, expr)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
}
- fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
self.analyze(pat.syntax())?
.type_of_pat(self.db, pat)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
}
- fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
+ /// It also includes the changes that binding mode makes in the type. For example in
+ /// `let ref x @ Some(_) = None` the result of `type_of_pat` is `Option<T>` but the result
+ /// of this function is `&mut Option<T>`
+ pub fn type_of_binding_in_pat(&self, pat: &ast::IdentPat) -> Option<Type> {
self.analyze(pat.syntax())?.type_of_binding_in_pat(self.db, pat)
}
- fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
self.analyze(param.syntax())?.type_of_self(self.db, param)
}
- fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
self.analyze(pat.syntax())
.and_then(|it| it.pattern_adjustments(self.db, pat))
.unwrap_or_default()
}
- fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
@@ -1187,32 +968,32 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
}
- fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
}
- fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
self.analyze(field.syntax())?.resolve_field(self.db, field)
}
- fn resolve_record_field(
+ pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
) -> Option<(Field, Option<Local>, Type)> {
self.analyze(field.syntax())?.resolve_record_field(self.db, field)
}
- fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
+ pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<(Field, Type)> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}
- fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
let sa = self.analyze(macro_call.syntax())?;
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
sa.resolve_macro_call(self.db, macro_call)
}
- fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let sa = match self.analyze(macro_call.syntax()) {
Some(it) => it,
None => return false,
@@ -1221,7 +1002,7 @@ impl<'db> SemanticsImpl<'db> {
sa.is_unsafe_macro_call(self.db, macro_call)
}
- fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| {
let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
@@ -1230,7 +1011,7 @@ impl<'db> SemanticsImpl<'db> {
Some(Macro { id })
}
- fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}
@@ -1238,17 +1019,17 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
- fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
}
- fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.analyze(literal.syntax())
.and_then(|it| it.record_literal_missing_fields(self.db, literal))
.unwrap_or_default()
}
- fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
self.analyze(pattern.syntax())
.and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
.unwrap_or_default()
@@ -1260,7 +1041,7 @@ impl<'db> SemanticsImpl<'db> {
f(&mut ctx)
}
- fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+ pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax()).with_value(src).cloned();
T::to_def(self, src)
}
@@ -1269,7 +1050,7 @@ impl<'db> SemanticsImpl<'db> {
self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
}
- fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
db: self.db,
file_id,
@@ -1277,7 +1058,11 @@ impl<'db> SemanticsImpl<'db> {
})
}
- fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ pub fn scope_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SemanticsScope<'db>> {
self.analyze_with_offset_no_infer(node, offset).map(
|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
db: self.db,
@@ -1287,7 +1072,8 @@ impl<'db> SemanticsImpl<'db> {
)
}
- fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ /// Search for a definition's source and cache its syntax tree
+ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
where
Def::Ast: AstNode,
{
@@ -1352,7 +1138,7 @@ impl<'db> SemanticsImpl<'db> {
assert!(prev == None || prev == Some(file_id))
}
- fn assert_contains_node(&self, node: &SyntaxNode) {
+ pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.find_file(node);
}
@@ -1388,7 +1174,7 @@ impl<'db> SemanticsImpl<'db> {
InFile::new(file_id, node)
}
- fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
method_call_expr
.receiver()
.and_then(|expr| {
@@ -1411,7 +1197,7 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or(false)
}
- fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
ref_expr
.expr()
.and_then(|expr| {
@@ -1430,7 +1216,7 @@ impl<'db> SemanticsImpl<'db> {
// more than it should with the current implementation.
}
- fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
if ident_pat.ref_token().is_none() {
return false;
}
@@ -1473,7 +1259,8 @@ impl<'db> SemanticsImpl<'db> {
.unwrap_or(false)
}
- fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
+ /// Returns `true` if the `expr` is inside an `unsafe` context.
+ pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
let Some(enclosing_item) =
expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
else {
@@ -1683,6 +1470,14 @@ impl SemanticsScope<'_> {
|name, id| cb(name, id.into()),
)
}
+
+ pub fn extern_crates(&self) -> impl Iterator<Item = (Name, Module)> + '_ {
+ self.resolver.extern_crates_in_scope().map(|(name, id)| (name, Module { id }))
+ }
+
+ pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
+ self.resolver.extern_crate_decls_in_scope(self.db.upcast())
+ }
}
#[derive(Debug)]
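The hunks above make the `SemanticsImpl` pattern- and type-query methods callable from outside the crate. Below is a minimal sketch of the distinction the new doc comment describes; the helper `binding_types`, the way the `Semantics<'_, RootDatabase>` value is obtained, and the printed output are assumptions for illustration, not part of this change.

// Sketch only: assumes a `Semantics<'_, RootDatabase>` built elsewhere (e.g. from an
// ide-db fixture) and an `ast::IdentPat` taken from the parsed file; `binding_types`
// is a hypothetical helper, not an API added by this diff.
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::ast;

fn binding_types(sema: &Semantics<'_, RootDatabase>, ident_pat: &ast::IdentPat) {
    // Type of the pattern itself, ignoring the binding mode:
    // for `let ref x @ Some(_) = ...` this is `Option<T>`.
    if let Some(info) = sema.type_of_pat(&ast::Pat::IdentPat(ident_pat.clone())) {
        eprintln!("pattern type: {}", info.original().display(sema.db));
    }
    // Type of the binding `x`, with the `ref` binding mode applied: `&Option<T>`.
    if let Some(ty) = sema.type_of_binding_in_pat(ident_pat) {
        eprintln!("binding type: {}", ty.display(sema.db));
    }
}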
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index 3499daf11..f29fb1edf 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -487,7 +487,7 @@ impl SourceAnalyzer {
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
- .map(|it| it.into())
+ .map(|(it, _)| it.into())
}
pub(crate) fn resolve_bind_pat_to_const(
@@ -760,7 +760,7 @@ impl SourceAnalyzer {
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
- .map(|it| macro_id_to_def_id(db.upcast(), it))
+ .map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?;
Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
}
@@ -966,6 +966,7 @@ pub(crate) fn resolve_hir_path_as_attr_macro(
) -> Option<Macro> {
resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Attr))
+ .map(|(it, _)| it)
.map(Into::into)
}
@@ -983,7 +984,7 @@ fn resolve_hir_path_(
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
- let (ty, remaining_idx) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
+ let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
match remaining_idx {
Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() {
@@ -1067,7 +1068,7 @@ fn resolve_hir_path_(
let macros = || {
resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, None)
- .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
+ .map(|(def, _)| PathResolution::Def(ModuleDef::Macro(def.into())))
};
if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
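In the `source_analyzer.rs` hunks, `resolve_path_as_macro` and `resolve_path_in_type_ns` now return an extra component alongside the resolution, so every call site switches from `.map(|it| ...)` to `.map(|(it, _)| ...)`. The stand-alone sketch below only illustrates that calling-convention change; `Resolver`, `MacroId`, and `Extra` are stand-ins, not the real rust-analyzer types.

// Illustrative stand-ins for the resolver types; the point is how callers adapt
// when a resolution function starts returning a tuple with extra information.
#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroId(u32);
#[derive(Clone, Copy, Debug)]
struct Extra; // e.g. data about how the item was brought into scope

struct Resolver;

impl Resolver {
    // Before: -> Option<MacroId>. After: the extra component rides along.
    fn resolve_path_as_macro(&self) -> Option<(MacroId, Extra)> {
        Some((MacroId(0), Extra))
    }
}

fn main() {
    let resolver = Resolver;
    // Callers that only need the macro id destructure the tuple and discard the rest,
    // mirroring the `.map(|(it, _)| it.into())` edits in the hunks above.
    let id = resolver.resolve_path_as_macro().map(|(it, _)| it);
    assert_eq!(id, Some(MacroId(0)));
}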
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index 43d957412..ca7874c36 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -2,8 +2,10 @@
use base_db::FileRange;
use hir_def::{
- src::HasSource, AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId,
- ModuleDefId, ModuleId, TraitId,
+ item_scope::ItemInNs,
+ src::{HasChildSource, HasSource},
+ AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
+ TraitId,
};
use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase;
@@ -167,6 +169,40 @@ impl<'a> SymbolCollector<'a> {
self.collect_from_impl(impl_id);
}
+ // Record renamed imports.
+ // If it imports multiple items under different namespaces, we just pick one arbitrarily
+ // for now.
+ for id in scope.imports() {
+ let loc = id.import.lookup(self.db.upcast());
+ loc.id.item_tree(self.db.upcast());
+ let source = id.import.child_source(self.db.upcast());
+ let Some(use_tree_src) = source.value.get(id.idx) else { continue };
+ let Some(rename) = use_tree_src.rename() else { continue };
+ let Some(name) = rename.name() else { continue };
+
+ let res = scope.fully_resolve_import(self.db.upcast(), id);
+ res.iter_items().for_each(|(item, _)| {
+ let def = match item {
+ ItemInNs::Types(def) | ItemInNs::Values(def) => def,
+ ItemInNs::Macros(def) => ModuleDefId::from(def),
+ }
+ .into();
+ let dec_loc = DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
+ name_ptr: SyntaxNodePtr::new(name.syntax()),
+ };
+
+ self.symbols.push(FileSymbol {
+ name: name.text().into(),
+ def,
+ container_name: self.current_container_name.clone(),
+ loc: dec_loc,
+ is_alias: false,
+ });
+ });
+ }
+
for const_id in scope.unnamed_consts() {
self.collect_from_body(const_id);
}
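The new loop in `SymbolCollector` above indexes renamed (`as`) imports so that workspace-symbol search can find them under their alias. The snippet below is only an example of the kind of source the loop targets; `inner`, `Widget`, and `Alias` are invented names.

// Example input for the renamed-import indexing added above.
mod inner {
    pub struct Widget;
}

// For this use tree the collector now pushes a `FileSymbol` whose `name` is "Alias",
// whose `loc` points at the use tree and the rename's identifier, and whose `def`
// is the resolved `Widget` (one namespace picked arbitrarily if several resolve).
pub use inner::Widget as Alias;

fn main() {
    // The alias behaves like the original item; symbol search can now jump to the
    // `use` line when looking up "Alias".
    let _w = Alias;
}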