summaryrefslogtreecommitdiffstats
path: root/src/tools/rust-analyzer/crates/hir-expand
diff options
context:
space:
mode:
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir-expand')
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs151
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs713
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs335
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs256
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs240
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs214
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs102
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs80
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs53
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/quote.rs6
15 files changed, 1525 insertions, 661 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 5c684be03..40d8659f2 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -22,6 +22,7 @@ hashbrown = { version = "0.12.1", features = [
"inline-more",
], default-features = false }
smallvec.workspace = true
+triomphe.workspace = true
# local deps
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
index 2b27db0e9..c2b0d5985 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -20,7 +20,7 @@ use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstNode> {
raw: ErasedFileAstId,
- _ty: PhantomData<fn() -> N>,
+ covariant: PhantomData<fn() -> N>,
}
impl<N: AstNode> Clone for FileAstId<N> {
@@ -54,7 +54,7 @@ impl<N: AstNode> FileAstId<N> {
where
N: Into<M>,
{
- FileAstId { raw: self.raw, _ty: PhantomData }
+ FileAstId { raw: self.raw, covariant: PhantomData }
}
}
@@ -98,6 +98,7 @@ impl AstIdMap {
|| ast::Variant::can_cast(kind)
|| ast::RecordField::can_cast(kind)
|| ast::TupleField::can_cast(kind)
+ || ast::ConstArg::can_cast(kind)
{
res.alloc(&it);
true
@@ -115,12 +116,17 @@ impl AstIdMap {
}
}
}
+ res.arena.shrink_to_fit();
res
}
pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
- FileAstId { raw, _ty: PhantomData }
+ FileAstId { raw, covariant: PhantomData }
+ }
+
+ pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+ AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
@@ -136,10 +142,6 @@ impl AstIdMap {
}
}
- pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
- AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
- }
-
fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item))
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index 8d1e88725..4c918e55b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,5 +1,5 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
-use std::{fmt, ops, sync::Arc};
+use std::{fmt, ops};
use base_db::CrateId;
use cfg::CfgExpr;
@@ -8,12 +8,12 @@ use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use triomphe::Arc;
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
- mod_path::{ModPath, PathKind},
- name::AsName,
+ mod_path::ModPath,
tt::{self, Subtree},
InFile,
};
@@ -21,6 +21,7 @@ use crate::{
/// Syntactical attributes, without filtering of `cfg_attr`s.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct RawAttrs {
+ // FIXME: Make this a ThinArc
entries: Option<Arc<[Attr]>>,
}
@@ -50,7 +51,9 @@ impl RawAttrs {
path: Interned::new(ModPath::from(crate::name!(doc))),
}),
})
- .collect::<Arc<_>>();
+ .collect::<Vec<_>>();
+ // FIXME: use `Arc::from_iter` when it becomes available
+ let entries: Arc<[Attr]> = Arc::from(entries);
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
@@ -68,7 +71,7 @@ impl RawAttrs {
(Some(a), Some(b)) => {
let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
Self {
- entries: Some(
+ entries: Some(Arc::from(
a.iter()
.cloned()
.chain(b.iter().map(|it| {
@@ -78,8 +81,9 @@ impl RawAttrs {
<< AttrId::AST_INDEX_BITS;
it
}))
- .collect(),
- ),
+ // FIXME: use `Arc::from_iter` when it becomes available
+ .collect::<Vec<_>>(),
+ )),
}
}
}
@@ -96,48 +100,51 @@ impl RawAttrs {
}
let crate_graph = db.crate_graph();
- let new_attrs = self
- .iter()
- .flat_map(|attr| -> SmallVec<[_; 1]> {
- let is_cfg_attr =
- attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
- if !is_cfg_attr {
- return smallvec![attr.clone()];
- }
-
- let subtree = match attr.token_tree_value() {
- Some(it) => it,
- _ => return smallvec![attr.clone()],
- };
-
- let (cfg, parts) = match parse_cfg_attr_input(subtree) {
- Some(it) => it,
- None => return smallvec![attr.clone()],
- };
- let index = attr.id;
- let attrs =
- parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
- let tree = Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: attr.to_vec(),
- };
- // FIXME hygiene
- let hygiene = Hygiene::new_unhygienic();
- Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
- });
-
- let cfg_options = &crate_graph[krate].cfg_options;
- let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
- let cfg = CfgExpr::parse(&cfg);
- if cfg_options.check(&cfg) == Some(false) {
- smallvec![]
- } else {
- cov_mark::hit!(cfg_attr_active);
-
- attrs.collect()
- }
- })
- .collect();
+ let new_attrs = Arc::from(
+ self.iter()
+ .flat_map(|attr| -> SmallVec<[_; 1]> {
+ let is_cfg_attr =
+ attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+ if !is_cfg_attr {
+ return smallvec![attr.clone()];
+ }
+
+ let subtree = match attr.token_tree_value() {
+ Some(it) => it,
+ _ => return smallvec![attr.clone()],
+ };
+
+ let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+ Some(it) => it,
+ None => return smallvec![attr.clone()],
+ };
+ let index = attr.id;
+ let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
+ |(idx, attr)| {
+ let tree = Subtree {
+ delimiter: tt::Delimiter::unspecified(),
+ token_trees: attr.to_vec(),
+ };
+ // FIXME hygiene
+ let hygiene = Hygiene::new_unhygienic();
+ Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
+ },
+ );
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+ let cfg = CfgExpr::parse(&cfg);
+ if cfg_options.check(&cfg) == Some(false) {
+ smallvec![]
+ } else {
+ cov_mark::hit!(cfg_attr_active);
+
+ attrs.collect()
+ }
+ })
+ // FIXME: use `Arc::from_iter` when it becomes available
+ .collect::<Vec<_>>(),
+ );
RawAttrs { entries: Some(new_attrs) }
}
@@ -185,14 +192,14 @@ pub enum AttrInput {
/// `#[attr = "string"]`
Literal(SmolStr),
/// `#[attr(subtree)]`
- TokenTree(tt::Subtree, mbe::TokenMap),
+ TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
- AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
+ AttrInput::TokenTree(tt) => tt.0.fmt(f),
}
}
}
@@ -213,7 +220,7 @@ impl Attr {
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
- Some(Interned::new(AttrInput::TokenTree(tree, map)))
+ Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
} else {
None
};
@@ -249,7 +256,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
- AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
+ AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@@ -260,13 +267,17 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
- AttrInput::TokenTree(subtree, _) => Some(subtree),
+ AttrInput::TokenTree(tt) => Some(&tt.0),
_ => None,
}
}
/// Parses this attribute as a token tree consisting of comma separated paths.
- pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
+ pub fn parse_path_comma_token_tree<'a>(
+ &'a self,
+ db: &'a dyn ExpandDatabase,
+ hygiene: &'a Hygiene,
+ ) -> Option<impl Iterator<Item = ModPath> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -275,19 +286,37 @@ impl Attr {
let paths = args
.token_trees
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
- .filter_map(|tts| {
+ .filter_map(move |tts| {
if tts.is_empty() {
return None;
}
- let segments = tts.iter().filter_map(|tt| match tt {
- tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()),
- _ => None,
- });
- Some(ModPath::from_segments(PathKind::Plain, segments))
+ // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
+ let subtree = tt::Subtree {
+ delimiter: tt::Delimiter::unspecified(),
+ token_trees: tts.into_iter().cloned().collect(),
+ };
+ let (parse, _) =
+ mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
+ let meta = ast::Meta::cast(parse.syntax_node())?;
+ // Only simple paths are allowed.
+ if meta.eq_token().is_some() || meta.expr().is_some() || meta.token_tree().is_some()
+ {
+ return None;
+ }
+ let path = meta.path()?;
+ ModPath::from_src(db, path, hygiene)
});
Some(paths)
}
+
+ pub fn cfg(&self) -> Option<CfgExpr> {
+ if *self.path.as_ident()? == crate::name![cfg] {
+ self.token_tree_value().map(CfgExpr::parse)
+ } else {
+ None
+ }
+ }
}
pub fn collect_attrs(
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 277ecd939..80695bc06 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -96,7 +96,7 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
- MacroCallKind::Attr { attr_args, is_derive: true, .. } => &attr_args.0,
+ MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
_ => return ExpandResult::ok(tt::Subtree::empty()),
};
pseudo_derive_attr_expansion(tt, derives)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index 5c1a75132..3d1e272b9 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -1,12 +1,19 @@
//! Builtin derives.
+use ::tt::Ident;
use base_db::{CrateOrigin, LangCrateOrigin};
+use itertools::izip;
+use mbe::TokenMap;
+use rustc_hash::FxHashSet;
+use stdx::never;
use tracing::debug;
-use crate::tt::{self, TokenId};
-use syntax::{
- ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName},
- match_ast,
+use crate::{
+ name::{AsName, Name},
+ tt::{self, TokenId},
+};
+use syntax::ast::{
+ self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
@@ -58,46 +65,201 @@ pub fn find_builtin_derive(ident: &name::Name) -> Option<BuiltinDeriveExpander>
BuiltinDeriveExpander::find_by_name(ident)
}
+enum VariantShape {
+ Struct(Vec<tt::Ident>),
+ Tuple(usize),
+ Unit,
+}
+
+fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
+ (0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified()))
+}
+
+impl VariantShape {
+ fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
+ self.as_pattern_map(path, |x| quote!(#x))
+ }
+
+ fn field_names(&self) -> Vec<tt::Ident> {
+ match self {
+ VariantShape::Struct(s) => s.clone(),
+ VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
+ VariantShape::Unit => vec![],
+ }
+ }
+
+ fn as_pattern_map(
+ &self,
+ path: tt::Subtree,
+ field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+ ) -> tt::Subtree {
+ match self {
+ VariantShape::Struct(fields) => {
+ let fields = fields.iter().map(|x| {
+ let mapped = field_map(x);
+ quote! { #x : #mapped , }
+ });
+ quote! {
+ #path { ##fields }
+ }
+ }
+ &VariantShape::Tuple(n) => {
+ let fields = tuple_field_iterator(n).map(|x| {
+ let mapped = field_map(&x);
+ quote! {
+ #mapped ,
+ }
+ });
+ quote! {
+ #path ( ##fields )
+ }
+ }
+ VariantShape::Unit => path,
+ }
+ }
+
+ fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> {
+ let r = match value {
+ None => VariantShape::Unit,
+ Some(FieldList::RecordFieldList(x)) => VariantShape::Struct(
+ x.fields()
+ .map(|x| x.name())
+ .map(|x| name_to_token(token_map, x))
+ .collect::<Result<_, _>>()?,
+ ),
+ Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()),
+ };
+ Ok(r)
+ }
+}
+
+enum AdtShape {
+ Struct(VariantShape),
+ Enum { variants: Vec<(tt::Ident, VariantShape)>, default_variant: Option<usize> },
+ Union,
+}
+
+impl AdtShape {
+ fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
+ self.as_pattern_map(name, |x| quote!(#x))
+ }
+
+ fn field_names(&self) -> Vec<Vec<tt::Ident>> {
+ match self {
+ AdtShape::Struct(s) => {
+ vec![s.field_names()]
+ }
+ AdtShape::Enum { variants, .. } => {
+ variants.iter().map(|(_, fields)| fields.field_names()).collect()
+ }
+ AdtShape::Union => {
+ never!("using fields of union in derive is always wrong");
+ vec![]
+ }
+ }
+ }
+
+ fn as_pattern_map(
+ &self,
+ name: &tt::Ident,
+ field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+ ) -> Vec<tt::Subtree> {
+ match self {
+ AdtShape::Struct(s) => {
+ vec![s.as_pattern_map(quote! { #name }, field_map)]
+ }
+ AdtShape::Enum { variants, .. } => variants
+ .iter()
+ .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
+ .collect(),
+ AdtShape::Union => {
+ never!("pattern matching on union is always wrong");
+ vec![quote! { un }]
+ }
+ }
+ }
+}
+
struct BasicAdtInfo {
name: tt::Ident,
- /// `Some(ty)` if it's a const param of type `ty`, `None` if it's a type param.
- param_types: Vec<Option<tt::Subtree>>,
+ shape: AdtShape,
+ /// first field is the name, and
+ /// second field is `Some(ty)` if it's a const param of type `ty`, `None` if it's a type param.
+ /// third fields is where bounds, if any
+ param_types: Vec<(tt::Subtree, Option<tt::Subtree>, Option<tt::Subtree>)>,
+ associated_types: Vec<tt::Subtree>,
}
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
debug!("derive node didn't parse");
- ExpandError::Other("invalid item definition".into())
+ ExpandError::other("invalid item definition")
})?;
let item = macro_items.items().next().ok_or_else(|| {
debug!("no module item parsed");
- ExpandError::Other("no item found".into())
+ ExpandError::other("no item found")
})?;
- let node = item.syntax();
- let (name, params) = match_ast! {
- match node {
- ast::Struct(it) => (it.name(), it.generic_param_list()),
- ast::Enum(it) => (it.name(), it.generic_param_list()),
- ast::Union(it) => (it.name(), it.generic_param_list()),
- _ => {
- debug!("unexpected node is {:?}", node);
- return Err(ExpandError::Other("expected struct, enum or union".into()))
- },
+ let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| {
+ debug!("expected adt, found: {:?}", item);
+ ExpandError::other("expected struct, enum or union")
+ })?;
+ let (name, generic_param_list, shape) = match &adt {
+ ast::Adt::Struct(it) => (
+ it.name(),
+ it.generic_param_list(),
+ AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?),
+ ),
+ ast::Adt::Enum(it) => {
+ let default_variant = it
+ .variant_list()
+ .into_iter()
+ .flat_map(|x| x.variants())
+ .position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+ (
+ it.name(),
+ it.generic_param_list(),
+ AdtShape::Enum {
+ default_variant,
+ variants: it
+ .variant_list()
+ .into_iter()
+ .flat_map(|x| x.variants())
+ .map(|x| {
+ Ok((
+ name_to_token(&token_map, x.name())?,
+ VariantShape::from(x.field_list(), &token_map)?,
+ ))
+ })
+ .collect::<Result<_, ExpandError>>()?,
+ },
+ )
}
+ ast::Adt::Union(it) => (it.name(), it.generic_param_list(), AdtShape::Union),
};
- let name = name.ok_or_else(|| {
- debug!("parsed item has no name");
- ExpandError::Other("missing name".into())
- })?;
- let name_token_id =
- token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
- let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
- let param_types = params
+
+ let mut param_type_set: FxHashSet<Name> = FxHashSet::default();
+ let param_types = generic_param_list
.into_iter()
.flat_map(|param_list| param_list.type_or_const_params())
.map(|param| {
- if let ast::TypeOrConstParam::Const(param) = param {
+ let name = {
+ let this = param.name();
+ match this {
+ Some(x) => {
+ param_type_set.insert(x.as_name());
+ mbe::syntax_node_to_token_tree(x.syntax()).0
+ }
+ None => tt::Subtree::empty(),
+ }
+ };
+ let bounds = match &param {
+ ast::TypeOrConstParam::Type(x) => {
+ x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+ }
+ ast::TypeOrConstParam::Const(_) => None,
+ };
+ let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
@@ -105,27 +267,103 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
Some(ty)
} else {
None
- }
+ };
+ (name, ty, bounds)
+ })
+ .collect();
+
+ // For a generic parameter `T`, when shorthand associated type `T::Assoc` appears in field
+ // types (of any variant for enums), we generate trait bound for it. It sounds reasonable to
+ // also generate trait bound for qualified associated type `<T as Trait>::Assoc`, but rustc
+ // does not do that for some unknown reason.
+ //
+ // See the analogous function in rustc [find_type_parameters()] and rust-lang/rust#50730.
+ // [find_type_parameters()]: https://github.com/rust-lang/rust/blob/1.70.0/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs#L378
+
+ // It's cumbersome to deal with the distinct structures of ADTs, so let's just get untyped
+ // `SyntaxNode` that contains fields and look for descendant `ast::PathType`s. Of note is that
+ // we should not inspect `ast::PathType`s in parameter bounds and where clauses.
+ let field_list = match adt {
+ ast::Adt::Enum(it) => it.variant_list().map(|list| list.syntax().clone()),
+ ast::Adt::Struct(it) => it.field_list().map(|list| list.syntax().clone()),
+ ast::Adt::Union(it) => it.record_field_list().map(|list| list.syntax().clone()),
+ };
+ let associated_types = field_list
+ .into_iter()
+ .flat_map(|it| it.descendants())
+ .filter_map(ast::PathType::cast)
+ .filter_map(|p| {
+ let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
+ param_type_set.contains(&name).then_some(p)
})
+ .map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
.collect();
- Ok(BasicAdtInfo { name: name_token, param_types })
+ let name_token = name_to_token(&token_map, name)?;
+ Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
-fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
+fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
+ let name = name.ok_or_else(|| {
+ debug!("parsed item has no name");
+ ExpandError::other("missing name")
+ })?;
+ let name_token_id =
+ token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
+ let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
+ Ok(name_token)
+}
+
+/// Given that we are deriving a trait `DerivedTrait` for a type like:
+///
+/// ```ignore (only-for-syntax-highlight)
+/// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait {
+/// a: A,
+/// b: B::Item,
+/// b1: <B as DeclaredTrait>::Item,
+/// c1: <C as WhereTrait>::Item,
+/// c2: Option<<C as WhereTrait>::Item>,
+/// ...
+/// }
+/// ```
+///
+/// create an impl like:
+///
+/// ```ignore (only-for-syntax-highlight)
+/// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where
+/// C: WhereTrait,
+/// A: DerivedTrait + B1 + ... + BN,
+/// B: DerivedTrait + B1 + ... + BN,
+/// C: DerivedTrait + B1 + ... + BN,
+/// B::Item: DerivedTrait + B1 + ... + BN,
+/// <C as WhereTrait>::Item: DerivedTrait + B1 + ... + BN,
+/// ...
+/// {
+/// ...
+/// }
+/// ```
+///
+/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
+/// therefore does not get bound by the derived trait.
+fn expand_simple_derive(
+ tt: &tt::Subtree,
+ trait_path: tt::Subtree,
+ make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
let info = match parse_adt(tt) {
Ok(info) => info,
- Err(e) => return ExpandResult::with_err(tt::Subtree::empty(), e),
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
+ let trait_body = make_trait_body(&info);
+ let mut where_block = vec![];
let (params, args): (Vec<_>, Vec<_>) = info
.param_types
.into_iter()
- .enumerate()
- .map(|(idx, param_ty)| {
- let ident = tt::Leaf::Ident(tt::Ident {
- span: tt::TokenId::unspecified(),
- text: format!("T{idx}").into(),
- });
+ .map(|(ident, param_ty, bound)| {
let ident_ = ident.clone();
+ if let Some(b) = bound {
+ let ident = ident.clone();
+ where_block.push(quote! { #ident : #b , });
+ }
if let Some(ty) = param_ty {
(quote! { const #ident : #ty , }, quote! { #ident_ , })
} else {
@@ -134,9 +372,16 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu
}
})
.unzip();
+
+ where_block.extend(info.associated_types.iter().map(|x| {
+ let x = x.clone();
+ let bound = trait_path.clone();
+ quote! { #x : #bound , }
+ }));
+
let name = info.name;
let expanded = quote! {
- impl < ##params > #trait_path for #name < ##args > {}
+ impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
@@ -163,7 +408,7 @@ fn copy_expand(
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::marker::Copy })
+ expand_simple_derive(tt, quote! { #krate::marker::Copy }, |_| quote! {})
}
fn clone_expand(
@@ -172,7 +417,63 @@ fn clone_expand(
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::clone::Clone })
+ expand_simple_derive(tt, quote! { #krate::clone::Clone }, |adt| {
+ if matches!(adt.shape, AdtShape::Union) {
+ let star = tt::Punct {
+ char: '*',
+ spacing: ::tt::Spacing::Alone,
+ span: tt::TokenId::unspecified(),
+ };
+ return quote! {
+ fn clone(&self) -> Self {
+ #star self
+ }
+ };
+ }
+ if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
+ let star = tt::Punct {
+ char: '*',
+ spacing: ::tt::Spacing::Alone,
+ span: tt::TokenId::unspecified(),
+ };
+ return quote! {
+ fn clone(&self) -> Self {
+ match #star self {}
+ }
+ };
+ }
+ let name = &adt.name;
+ let patterns = adt.shape.as_pattern(name);
+ let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() });
+ let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
+ let fat_arrow = fat_arrow();
+ quote! {
+ #pat #fat_arrow #expr,
+ }
+ });
+
+ quote! {
+ fn clone(&self) -> Self {
+ match self {
+ ##arms
+ }
+ }
+ }
+ })
+}
+
+/// This function exists since `quote! { => }` doesn't work.
+fn fat_arrow() -> ::tt::Subtree<TokenId> {
+ let eq =
+ tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
+ quote! { #eq> }
+}
+
+/// This function exists since `quote! { && }` doesn't work.
+fn and_and() -> ::tt::Subtree<TokenId> {
+ let and =
+ tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
+ quote! { #and& }
}
fn default_expand(
@@ -180,8 +481,38 @@ fn default_expand(
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::default::Default })
+ let krate = &find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::default::Default }, |adt| {
+ let body = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let name = &adt.name;
+ fields
+ .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
+ }
+ AdtShape::Enum { default_variant, variants } => {
+ if let Some(d) = default_variant {
+ let (name, fields) = &variants[*d];
+ let adt_name = &adt.name;
+ fields.as_pattern_map(
+ quote!(#adt_name :: #name),
+ |_| quote!(#krate::default::Default::default()),
+ )
+ } else {
+ // FIXME: Return expand error here
+ quote!()
+ }
+ }
+ AdtShape::Union => {
+ // FIXME: Return expand error here
+ quote!()
+ }
+ };
+ quote! {
+ fn default() -> Self {
+ #body
+ }
+ }
+ })
}
fn debug_expand(
@@ -189,8 +520,79 @@ fn debug_expand(
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::fmt::Debug })
+ let krate = &find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| {
+ let for_variant = |name: String, v: &VariantShape| match v {
+ VariantShape::Struct(fields) => {
+ let for_fields = fields.iter().map(|x| {
+ let x_string = x.to_string();
+ quote! {
+ .field(#x_string, & #x)
+ }
+ });
+ quote! {
+ f.debug_struct(#name) ##for_fields .finish()
+ }
+ }
+ VariantShape::Tuple(n) => {
+ let for_fields = tuple_field_iterator(*n).map(|x| {
+ quote! {
+ .field( & #x)
+ }
+ });
+ quote! {
+ f.debug_tuple(#name) ##for_fields .finish()
+ }
+ }
+ VariantShape::Unit => quote! {
+ f.write_str(#name)
+ },
+ };
+ if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
+ let star = tt::Punct {
+ char: '*',
+ spacing: ::tt::Spacing::Alone,
+ span: tt::TokenId::unspecified(),
+ };
+ return quote! {
+ fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
+ match #star self {}
+ }
+ };
+ }
+ let arms = match &adt.shape {
+ AdtShape::Struct(fields) => {
+ let fat_arrow = fat_arrow();
+ let name = &adt.name;
+ let pat = fields.as_pattern(quote!(#name));
+ let expr = for_variant(name.to_string(), fields);
+ vec![quote! { #pat #fat_arrow #expr }]
+ }
+ AdtShape::Enum { variants, .. } => variants
+ .iter()
+ .map(|(name, v)| {
+ let fat_arrow = fat_arrow();
+ let adt_name = &adt.name;
+ let pat = v.as_pattern(quote!(#adt_name :: #name));
+ let expr = for_variant(name.to_string(), v);
+ quote! {
+ #pat #fat_arrow #expr ,
+ }
+ })
+ .collect(),
+ AdtShape::Union => {
+ // FIXME: Return expand error here
+ vec![]
+ }
+ };
+ quote! {
+ fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
+ match self {
+ ##arms
+ }
+ }
+ }
+ })
}
fn hash_expand(
@@ -198,8 +600,47 @@ fn hash_expand(
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::hash::Hash })
+ let krate = &find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::hash::Hash }, |adt| {
+ if matches!(adt.shape, AdtShape::Union) {
+ // FIXME: Return expand error here
+ return quote! {};
+ }
+ if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
+ let star = tt::Punct {
+ char: '*',
+ spacing: ::tt::Spacing::Alone,
+ span: tt::TokenId::unspecified(),
+ };
+ return quote! {
+ fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
+ match #star self {}
+ }
+ };
+ }
+ let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
+ |(pat, names)| {
+ let expr = {
+ let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
+ quote! { {
+ ##it
+ } }
+ };
+ let fat_arrow = fat_arrow();
+ quote! {
+ #pat #fat_arrow #expr ,
+ }
+ },
+ );
+ quote! {
+ fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
+ #krate::mem::discriminant(self).hash(ra_expand_state);
+ match self {
+ ##arms
+ }
+ }
+ }
+ })
}
fn eq_expand(
@@ -208,7 +649,7 @@ fn eq_expand(
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Eq })
+ expand_simple_derive(tt, quote! { #krate::cmp::Eq }, |_| quote! {})
}
fn partial_eq_expand(
@@ -217,7 +658,65 @@ fn partial_eq_expand(
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialEq })
+ expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }, |adt| {
+ if matches!(adt.shape, AdtShape::Union) {
+ // FIXME: Return expand error here
+ return quote! {};
+ }
+ let name = &adt.name;
+
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ |(pat1, pat2, names)| {
+ let fat_arrow = fat_arrow();
+ let body = match &*names {
+ [] => {
+ quote!(true)
+ }
+ [first, rest @ ..] => {
+ let rest = rest.iter().map(|x| {
+ let t1 = Ident::new(format!("{}_self", x.text), x.span);
+ let t2 = Ident::new(format!("{}_other", x.text), x.span);
+ let and_and = and_and();
+ quote!(#and_and #t1 .eq( #t2 ))
+ });
+ let first = {
+ let t1 = Ident::new(format!("{}_self", first.text), first.span);
+ let t2 = Ident::new(format!("{}_other", first.text), first.span);
+ quote!(#t1 .eq( #t2 ))
+ };
+ quote!(#first ##rest)
+ }
+ };
+ quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ },
+ );
+
+ let fat_arrow = fat_arrow();
+ quote! {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ ##arms
+ _unused #fat_arrow false
+ }
+ }
+ }
+ })
+}
+
+fn self_and_other_patterns(
+ adt: &BasicAdtInfo,
+ name: &tt::Ident,
+) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
+ let self_patterns = adt.shape.as_pattern_map(name, |x| {
+ let t = Ident::new(format!("{}_self", x.text), x.span);
+ quote!(#t)
+ });
+ let other_patterns = adt.shape.as_pattern_map(name, |x| {
+ let t = Ident::new(format!("{}_other", x.text), x.span);
+ quote!(#t)
+ });
+ (self_patterns, other_patterns)
}
fn ord_expand(
@@ -225,8 +724,63 @@ fn ord_expand(
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Ord })
+ let krate = &find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::Ord }, |adt| {
+ fn compare(
+ krate: &tt::TokenTree,
+ left: tt::Subtree,
+ right: tt::Subtree,
+ rest: tt::Subtree,
+ ) -> tt::Subtree {
+ let fat_arrow1 = fat_arrow();
+ let fat_arrow2 = fat_arrow();
+ quote! {
+ match #left.cmp(&#right) {
+ #krate::cmp::Ordering::Equal #fat_arrow1 {
+ #rest
+ }
+ c #fat_arrow2 return c,
+ }
+ }
+ }
+ if matches!(adt.shape, AdtShape::Union) {
+ // FIXME: Return expand error here
+ return quote!();
+ }
+ let left = quote!(#krate::intrinsics::discriminant_value(self));
+ let right = quote!(#krate::intrinsics::discriminant_value(other));
+
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ |(pat1, pat2, fields)| {
+ let mut body = quote!(#krate::cmp::Ordering::Equal);
+ for f in fields.into_iter().rev() {
+ let t1 = Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(#t1), quote!(#t2), body);
+ }
+ let fat_arrow = fat_arrow();
+ quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ },
+ );
+ let fat_arrow = fat_arrow();
+ let body = compare(
+ krate,
+ left,
+ right,
+ quote! {
+ match (self, other) {
+ ##arms
+ _unused #fat_arrow #krate::cmp::Ordering::Equal
+ }
+ },
+ );
+ quote! {
+ fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
+ #body
+ }
+ }
+ })
}
fn partial_ord_expand(
@@ -234,6 +788,61 @@ fn partial_ord_expand(
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd })
+ let krate = &find_builtin_crate(db, id);
+ expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }, |adt| {
+ fn compare(
+ krate: &tt::TokenTree,
+ left: tt::Subtree,
+ right: tt::Subtree,
+ rest: tt::Subtree,
+ ) -> tt::Subtree {
+ let fat_arrow1 = fat_arrow();
+ let fat_arrow2 = fat_arrow();
+ quote! {
+ match #left.partial_cmp(&#right) {
+ #krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
+ #rest
+ }
+ c #fat_arrow2 return c,
+ }
+ }
+ }
+ if matches!(adt.shape, AdtShape::Union) {
+ // FIXME: Return expand error here
+ return quote!();
+ }
+ let left = quote!(#krate::intrinsics::discriminant_value(self));
+ let right = quote!(#krate::intrinsics::discriminant_value(other));
+
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ |(pat1, pat2, fields)| {
+ let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
+ for f in fields.into_iter().rev() {
+ let t1 = Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(#t1), quote!(#t2), body);
+ }
+ let fat_arrow = fat_arrow();
+ quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ },
+ );
+ let fat_arrow = fat_arrow();
+ let body = compare(
+ krate,
+ left,
+ right,
+ quote! {
+ match (self, other) {
+ ##arms
+ _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
+ }
+ },
+ );
+ quote! {
+ fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
+ #body
+ }
+ }
+ })
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index a9c5e1488..a9f0c154b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -1,16 +1,21 @@
//! Builtin macro
+use std::mem;
+
+use ::tt::Ident;
use base_db::{AnchoredPath, Edition, FileId};
use cfg::CfgExpr;
use either::Either;
-use mbe::{parse_exprs_with_sep, parse_to_token_tree};
+use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
+use rustc_hash::FxHashMap;
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
- db::ExpandDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
+ db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
+ MacroCallLoc,
};
macro_rules! register_builtin {
@@ -45,7 +50,7 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
- ) -> ExpandResult<ExpandedEager> {
+ ) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
@@ -63,16 +68,9 @@ macro_rules! register_builtin {
};
}
-#[derive(Debug)]
-pub struct ExpandedEager {
- pub(crate) subtree: tt::Subtree,
- /// The included file ID of the include macro.
- pub(crate) included_file: Option<FileId>,
-}
-
-impl ExpandedEager {
- fn new(subtree: tt::Subtree) -> Self {
- ExpandedEager { subtree, included_file: None }
+impl EagerExpander {
+ pub fn is_include(&self) -> bool {
+ matches!(self, EagerExpander::Include)
}
}
@@ -90,11 +88,6 @@ register_builtin! {
(module_path, ModulePath) => module_path_expand,
(assert, Assert) => assert_expand,
(stringify, Stringify) => stringify_expand,
- (format_args, FormatArgs) => format_args_expand,
- (const_format_args, ConstFormatArgs) => format_args_expand,
- // format_args_nl only differs in that it adds a newline in the end,
- // so we use the same stub expansion for now
- (format_args_nl, FormatArgsNl) => format_args_expand,
(llvm_asm, LlvmAsm) => asm_expand,
(asm, Asm) => asm_expand,
(global_asm, GlobalAsm) => global_asm_expand,
@@ -106,6 +99,9 @@ register_builtin! {
(trace_macros, TraceMacros) => trace_macros_expand,
EAGER:
+ (format_args, FormatArgs) => format_args_expand,
+ (const_format_args, ConstFormatArgs) => format_args_expand,
+ (format_args_nl, FormatArgsNl) => format_args_nl_expand,
(compile_error, CompileError) => compile_error_expand,
(concat, Concat) => concat_expand,
(concat_idents, ConcatIdents) => concat_idents_expand,
@@ -135,9 +131,8 @@ fn line_expand(
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
- let line_num = 0;
let expanded = quote! {
- #line_num
+ 0 as u32
};
ExpandResult::ok(expanded)
@@ -179,9 +174,8 @@ fn column_expand(
_tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
- let col_num = 0;
let expanded = quote! {
- #col_num
+ 0 as u32
};
ExpandResult::ok(expanded)
@@ -234,45 +228,170 @@ fn file_expand(
}
fn format_args_expand(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ format_args_expand_general(db, id, tt, "")
+}
+
+fn format_args_nl_expand(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ format_args_expand_general(db, id, tt, "\\n")
+}
+
+fn format_args_expand_general(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ end_string: &str,
) -> ExpandResult<tt::Subtree> {
- // We expand `format_args!("", a1, a2)` to
- // ```
- // $crate::fmt::Arguments::new_v1(&[], &[
- // $crate::fmt::Argument::new(&arg1,$crate::fmt::Display::fmt),
- // $crate::fmt::Argument::new(&arg2,$crate::fmt::Display::fmt),
- // ])
- // ```,
- // which is still not really correct, but close enough for now
- let mut args = parse_exprs_with_sep(tt, ',');
-
- if args.is_empty() {
- return ExpandResult::with_err(
- tt::Subtree::empty(),
- mbe::ExpandError::NoMatchingRule.into(),
- );
- }
- for arg in &mut args {
+ let args = parse_exprs_with_sep(tt, ',');
+
+ let expand_error =
+ ExpandResult::new(tt::Subtree::empty(), mbe::ExpandError::NoMatchingRule.into());
+
+ let mut key_args = FxHashMap::default();
+ let mut args = args.into_iter().filter_map(|mut arg| {
// Remove `key =`.
if matches!(arg.token_trees.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=')
{
// but not with `==`
- if !matches!(arg.token_trees.get(2), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' )
+ if !matches!(arg.token_trees.get(2), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=')
{
- arg.token_trees.drain(..2);
+ let key = arg.token_trees.drain(..2).next().unwrap();
+ key_args.insert(key.to_string(), arg);
+ return None;
+ }
+ }
+ Some(arg)
+ }).collect::<Vec<_>>().into_iter();
+ // ^^^^^^^ we need this collect, to enforce the side effect of the filter_map closure (building the `key_args`)
+ let Some(format_subtree) = args.next() else {
+ return expand_error;
+ };
+ let format_string = (|| {
+ let token_tree = format_subtree.token_trees.get(0)?;
+ match token_tree {
+ tt::TokenTree::Leaf(l) => match l {
+ tt::Leaf::Literal(l) => {
+ if let Some(mut text) = l.text.strip_prefix('r') {
+ let mut raw_sharps = String::new();
+ while let Some(t) = text.strip_prefix('#') {
+ text = t;
+ raw_sharps.push('#');
+ }
+ text =
+ text.strip_suffix(&raw_sharps)?.strip_prefix('"')?.strip_suffix('"')?;
+ Some((text, l.span, Some(raw_sharps)))
+ } else {
+ let text = l.text.strip_prefix('"')?.strip_suffix('"')?;
+ let span = l.span;
+ Some((text, span, None))
+ }
+ }
+ _ => None,
+ },
+ tt::TokenTree::Subtree(_) => None,
+ }
+ })();
+ let Some((format_string, _format_string_span, raw_sharps)) = format_string else {
+ return expand_error;
+ };
+ let mut format_iter = format_string.chars().peekable();
+ let mut parts = vec![];
+ let mut last_part = String::new();
+ let mut arg_tts = vec![];
+ let mut err = None;
+ while let Some(c) = format_iter.next() {
+ // Parsing the format string. See https://doc.rust-lang.org/std/fmt/index.html#syntax for the grammar and more info
+ match c {
+ '{' => {
+ if format_iter.peek() == Some(&'{') {
+ format_iter.next();
+ last_part.push('{');
+ continue;
+ }
+ let mut argument = String::new();
+ while ![Some(&'}'), Some(&':')].contains(&format_iter.peek()) {
+ argument.push(match format_iter.next() {
+ Some(c) => c,
+ None => return expand_error,
+ });
+ }
+ let format_spec = match format_iter.next().unwrap() {
+ '}' => "".to_owned(),
+ ':' => {
+ let mut s = String::new();
+ while let Some(c) = format_iter.next() {
+ if c == '}' {
+ break;
+ }
+ s.push(c);
+ }
+ s
+ }
+ _ => unreachable!(),
+ };
+ parts.push(mem::take(&mut last_part));
+ let arg_tree = if argument.is_empty() {
+ match args.next() {
+ Some(x) => x,
+ None => {
+ err = Some(mbe::ExpandError::NoMatchingRule.into());
+ tt::Subtree::empty()
+ }
+ }
+ } else if let Some(tree) = key_args.get(&argument) {
+ tree.clone()
+ } else {
+ // FIXME: we should pick the related substring of the `_format_string_span` as the span. You
+ // can use `.char_indices()` instead of `.char()` for `format_iter` to find the substring interval.
+ let ident = Ident::new(argument, tt::TokenId::unspecified());
+ quote!(#ident)
+ };
+ let formatter = match &*format_spec {
+ "?" => quote!(::core::fmt::Debug::fmt),
+ "" => quote!(::core::fmt::Display::fmt),
+ _ => {
+ // FIXME: implement the rest and return expand error here
+ quote!(::core::fmt::Display::fmt)
+ }
+ };
+ arg_tts.push(quote! { ::core::fmt::Argument::new(&(#arg_tree), #formatter), });
}
+ '}' => {
+ if format_iter.peek() == Some(&'}') {
+ format_iter.next();
+ last_part.push('}');
+ } else {
+ return expand_error;
+ }
+ }
+ _ => last_part.push(c),
}
}
- let _format_string = args.remove(0);
- let arg_tts = args.into_iter().flat_map(|arg| {
- quote! { #DOLLAR_CRATE::fmt::Argument::new(&(#arg), #DOLLAR_CRATE::fmt::Display::fmt), }
- }.token_trees);
+ last_part += end_string;
+ if !last_part.is_empty() {
+ parts.push(last_part);
+ }
+ let part_tts = parts.into_iter().map(|x| {
+ let text = if let Some(raw) = &raw_sharps {
+ format!("r{raw}\"{}\"{raw}", x).into()
+ } else {
+ format!("\"{}\"", x).into()
+ };
+ let l = tt::Literal { span: tt::TokenId::unspecified(), text };
+ quote!(#l ,)
+ });
+ let arg_tts = arg_tts.into_iter().flat_map(|arg| arg.token_trees);
let expanded = quote! {
- #DOLLAR_CRATE::fmt::Arguments::new_v1(&[], &[##arg_tts])
+ ::core::fmt::Arguments::new_v1(&[##part_tts], &[##arg_tts])
};
- ExpandResult::ok(expanded)
+ ExpandResult { value: expanded, err }
}
fn asm_expand(
@@ -382,23 +501,23 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
- Some(unquoted) => ExpandError::Other(unquoted.into()),
- None => ExpandError::Other("`compile_error!` argument must be a string".into()),
+ Some(unquoted) => ExpandError::other(unquoted),
+ None => ExpandError::other("`compile_error!` argument must be a string"),
},
- _ => ExpandError::Other("`compile_error!` argument must be a string".into()),
+ _ => ExpandError::other("`compile_error!` argument must be a string"),
};
- ExpandResult { value: ExpandedEager::new(quote! {}), err: Some(err) }
+ ExpandResult { value: quote! {}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
for (i, mut t) in tt.token_trees.iter().enumerate() {
@@ -437,14 +556,14 @@ fn concat_expand(
}
}
}
- ExpandResult { value: ExpandedEager::new(quote!(#text)), err }
+ ExpandResult { value: quote!(#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
for (i, t) in tt.token_trees.iter().enumerate() {
@@ -477,7 +596,7 @@ fn concat_bytes_expand(
}
}
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: ExpandedEager::new(quote!([#ident])), err }
+ ExpandResult { value: quote!([#ident]), err }
}
fn concat_bytes_expand_subtree(
@@ -510,7 +629,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
for (i, t) in tt.token_trees.iter().enumerate() {
@@ -525,7 +644,7 @@ fn concat_idents_expand(
}
}
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: ExpandedEager::new(quote!(#ident)), err }
+ ExpandResult { value: quote!(#ident), err }
}
fn relative_file(
@@ -538,10 +657,10 @@ fn relative_file(
let path = AnchoredPath { anchor: call_site, path: path_str };
let res = db
.resolve_path(path)
- .ok_or_else(|| ExpandError::Other(format!("failed to load file `{path_str}`").into()))?;
+ .ok_or_else(|| ExpandError::other(format!("failed to load file `{path_str}`")))?;
// Prevent include itself
if res == call_site && !allow_recursion {
- Err(ExpandError::Other(format!("recursive inclusion of `{path_str}`").into()))
+ Err(ExpandError::other(format!("recursive inclusion of `{path_str}`")))
} else {
Ok(res)
}
@@ -560,38 +679,37 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
- tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
- let res = (|| {
- let path = parse_string(tt)?;
- let file_id = relative_file(db, arg_id, &path, false)?;
-
- let subtree =
- parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?.0;
- Ok((subtree, file_id))
- })();
-
- match res {
- Ok((subtree, file_id)) => {
- ExpandResult::ok(ExpandedEager { subtree, included_file: Some(file_id) })
- }
- Err(e) => ExpandResult::with_err(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- ),
+ _tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ match db.include_expand(arg_id) {
+ Ok((res, _)) => ExpandResult::ok(res.0.clone()),
+ Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
}
}
+pub(crate) fn include_arg_to_tt(
+ db: &dyn ExpandDatabase,
+ arg_id: MacroCallId,
+) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
+ let loc = db.lookup_intern_macro_call(arg_id);
+ let Some(EagerCallInfo {arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+ panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
+ };
+ let path = parse_string(&arg.0)?;
+ let file_id = relative_file(db, *arg_id, &path, false)?;
+
+ let (subtree, map) =
+ parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
+ Ok((triomphe::Arc::new((subtree, map)), file_id))
+}
+
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
if let Err(e) = parse_string(tt) {
- return ExpandResult::with_err(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- );
+ return ExpandResult::new(tt::Subtree::empty(), e);
}
// FIXME: actually read the file here if the user asked for macro expansion
@@ -602,22 +720,17 @@ fn include_bytes_expand(
span: tt::TokenId::unspecified(),
}))],
};
- ExpandResult::ok(ExpandedEager::new(res))
+ ExpandResult::ok(res)
}
fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
- Err(e) => {
- return ExpandResult::with_err(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- )
- }
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
// FIXME: we're not able to read excluded files (which is most of them because
@@ -627,14 +740,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
- return ExpandResult::ok(ExpandedEager::new(quote!("")));
+ return ExpandResult::ok(quote!(""));
}
};
let text = db.file_text(file_id);
let text = &*text;
- ExpandResult::ok(ExpandedEager::new(quote!(#text)))
+ ExpandResult::ok(quote!(#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@@ -646,15 +759,10 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => {
- return ExpandResult::with_err(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- )
- }
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
let mut err = None;
@@ -662,41 +770,34 @@ fn env_expand(
// The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
// unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
if key == "OUT_DIR" {
- err = Some(ExpandError::Other(
- r#"`OUT_DIR` not set, enable "build scripts" to fix"#.into(),
- ));
+ err = Some(ExpandError::other(r#"`OUT_DIR` not set, enable "build scripts" to fix"#));
}
// If the variable is unset, still return a dummy string to help type inference along.
// We cannot use an empty string here, because for
// `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become
// `include!("foo.rs"), which might go to infinite loop
- "__RA_UNIMPLEMENTED__".to_string()
+ "UNRESOLVED_ENV_VAR".to_string()
});
let expanded = quote! { #s };
- ExpandResult { value: ExpandedEager::new(expanded), err }
+ ExpandResult { value: expanded, err }
}
fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
-) -> ExpandResult<ExpandedEager> {
+) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => {
- return ExpandResult::with_err(
- ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
- e,
- )
- }
+ Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
-
+ // FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
- None => quote! { #DOLLAR_CRATE::option::Option::None::<&str> },
- Some(s) => quote! { #DOLLAR_CRATE::option::Option::Some(#s) },
+ None => quote! { ::core::option::Option::None::<&str> },
+ Some(s) => quote! { ::core::option::Option::Some(#s) },
};
- ExpandResult::ok(ExpandedEager::new(expanded))
+ ExpandResult::ok(expanded)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 45572499e..78b2db730 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,22 +1,22 @@
//! Defines database & queries for macro expansion.
-use std::sync::Arc;
-
-use base_db::{salsa, SourceDatabase};
+use base_db::{salsa, Edition, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
- AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, T,
+ AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
+use triomphe::Arc;
use crate::{
- ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, fixup,
- hygiene::HygieneFrame, tt, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
- ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
+ ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
+ builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
+ BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
+ ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
+ MacroDefKind, MacroFile, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -33,6 +33,8 @@ pub enum TokenExpander {
DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
/// Stuff like `line!` and `file!`.
Builtin(BuiltinFnLikeExpander),
+ /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
+ BuiltinEager(EagerExpander),
/// `global_allocator` and such.
BuiltinAttr(BuiltinAttrExpander),
/// `derive(Copy)` and such.
@@ -51,6 +53,7 @@ impl TokenExpander {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
+ TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
TokenExpander::ProcMacro(_) => {
@@ -66,6 +69,7 @@ impl TokenExpander {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
TokenExpander::Builtin(..)
+ | TokenExpander::BuiltinEager(..)
| TokenExpander::BuiltinAttr(..)
| TokenExpander::BuiltinDerive(..)
| TokenExpander::ProcMacro(..) => id,
@@ -76,6 +80,7 @@ impl TokenExpander {
match self {
TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
TokenExpander::Builtin(..)
+ | TokenExpander::BuiltinEager(..)
| TokenExpander::BuiltinAttr(..)
| TokenExpander::BuiltinDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
@@ -90,12 +95,15 @@ pub trait ExpandDatabase: SourceDatabase {
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
#[salsa::transparent]
- fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
+ fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
+ #[salsa::transparent]
+ fn parse_or_expand_with_err(&self, file_id: HirFileId) -> ExpandResult<Parse<SyntaxNode>>;
/// Implementation for the macro case.
+ // This query is LRU cached
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
- ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
+ ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@@ -119,15 +127,27 @@ pub trait ExpandDatabase: SourceDatabase {
/// just fetches procedural ones.
fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
- /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory)
- fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
+ /// Expand macro call to a token tree.
+ // This query is LRU cached
+ fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
+ #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
+ fn include_expand(
+ &self,
+ arg_id: MacroCallId,
+ ) -> Result<
+ (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
+ ExpandError,
+ >;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
- /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
- /// heroically debugged this once!
+ /// non-determinism breaks salsa in a very, very, very bad way.
+ /// @edwin0cheng heroically debugged this once!
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
- /// Firewall query that returns the error from the `macro_expand` query.
- fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
+ /// Firewall query that returns the errors from the `parse_macro_expansion` query.
+ fn parse_macro_expansion_error(
+ &self,
+ macro_call: MacroCallId,
+ ) -> ExpandResult<Box<[SyntaxError]>>;
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
}
@@ -159,8 +179,8 @@ pub fn expand_speculative(
);
let (attr_arg, token_id) = match loc.kind {
- MacroCallKind::Attr { invoc_attr_index, is_derive, .. } => {
- let attr = if is_derive {
+ MacroCallKind::Attr { invoc_attr_index, .. } => {
+ let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
ast::Attr::cast(speculative_args.clone())
} else {
@@ -236,17 +256,26 @@ pub fn expand_speculative(
}
fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
- let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
- Arc::new(map)
+ Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
}
-fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
+ HirFileIdRepr::MacroFile(macro_file) => {
+ db.parse_macro_expansion(macro_file).value.0.syntax_node()
+ }
+ }
+}
+
+fn parse_or_expand_with_err(
+ db: &dyn ExpandDatabase,
+ file_id: HirFileId,
+) -> ExpandResult<Parse<SyntaxNode>> {
match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
+ HirFileIdRepr::FileId(file_id) => ExpandResult::ok(db.parse(file_id).to_syntax()),
HirFileIdRepr::MacroFile(macro_file) => {
- // FIXME: Note how we convert from `Parse` to `SyntaxNode` here,
- // forgetting about parse errors.
- db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
+ db.parse_macro_expansion(macro_file).map(|(it, _)| it)
}
}
}
@@ -254,35 +283,9 @@ fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> Option<Syntax
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
-) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
let _p = profile::span("parse_macro_expansion");
- let mbe::ValueResult { value, err } = db.macro_expand(macro_file.macro_call_id);
-
- if let Some(err) = &err {
- // Note:
- // The final goal we would like to make all parse_macro success,
- // such that the following log will not call anyway.
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let node = loc.kind.to_node(db);
-
- // collect parent information for warning log
- let parents =
- std::iter::successors(loc.kind.file_id().call_node(db), |it| it.file_id.call_node(db))
- .map(|n| format!("{:#}", n.value))
- .collect::<Vec<_>>()
- .join("\n");
-
- tracing::debug!(
- "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
- err,
- node.value,
- parents
- );
- }
- let tt = match value {
- Some(tt) => tt,
- None => return ExpandResult { value: None, err },
- };
+ let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
@@ -291,16 +294,21 @@ fn parse_macro_expansion(
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
- ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err }
+ ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
- let arg = db.macro_arg_text(id)?;
let loc = db.lookup_intern_macro_call(id);
+ if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
+ return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+ }
+
+ let arg = db.macro_arg_text(id)?;
+
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
@@ -339,7 +347,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.map(|it| it.syntax().clone())
.collect()
}
- MacroCallKind::Attr { is_derive: true, .. } => return None,
+ MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
ast::Item::cast(node.clone())?
@@ -376,7 +384,17 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
return None;
}
}
- Some(arg.green().into())
+ if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+ Some(
+ mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
+ .0
+ .syntax_node()
+ .green()
+ .into(),
+ )
+ } else {
+ Some(arg.green().into())
+ }
}
fn macro_def(
@@ -385,13 +403,14 @@ fn macro_def(
) -> Result<Arc<TokenExpander>, mbe::ParseError> {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
+ let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021;
let (mac, def_site_token_map) = match ast_id.to_node(db) {
ast::Macro::MacroRules(macro_rules) => {
let arg = macro_rules
.token_tree()
.ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt)?;
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?;
(mac, def_site_token_map)
}
ast::Macro::MacroDef(macro_def) => {
@@ -399,7 +418,7 @@ fn macro_def(
.body()
.ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro2(&tt)?;
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?;
(mac, def_site_token_map)
}
};
@@ -412,82 +431,98 @@ fn macro_def(
MacroDefKind::BuiltInDerive(expander, _) => {
Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
}
- MacroDefKind::BuiltInEager(..) => {
- // FIXME: Return a random error here just to make the types align.
- // This obviously should do something real instead.
- Err(mbe::ParseError::UnexpectedToken("unexpected eager macro".into()))
+ MacroDefKind::BuiltInEager(expander, ..) => {
+ Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
}
MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
}
}
-fn macro_expand(
- db: &dyn ExpandDatabase,
- id: MacroCallId,
-) -> ExpandResult<Option<Arc<tt::Subtree>>> {
+fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
- let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
- if let Some(eager) = &loc.eager {
- return ExpandResult {
- value: Some(eager.arg_or_expansion.clone()),
- // FIXME: There could be errors here!
- err: None,
- };
+ let loc = db.lookup_intern_macro_call(id);
+ if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
+ // This is an input expansion for an eager macro. These are already pre-expanded
+ return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
}
-
- let macro_arg = match db.macro_arg(id) {
- Some(it) => it,
- None => {
- return ExpandResult::only_err(ExpandError::Other(
- "Failed to lower macro args to token tree".into(),
- ))
- }
- };
-
let expander = match db.macro_def(loc.def) {
Ok(it) => it,
- // FIXME: This is weird -- we effectively report macro *definition*
- // errors lazily, when we try to expand the macro. Instead, they should
- // be reported at the definition site (when we construct a def map).
+    // FIXME: We should make sure to enforce an invariant that invalid macro
+    // definitions do not get expanders that could reach this call path!
Err(err) => {
- return ExpandResult::only_err(ExpandError::Other(
- format!("invalid macro definition: {err}").into(),
- ))
+ return ExpandResult {
+ value: Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: vec![],
+ }),
+ err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
+ }
}
};
- let ExpandResult { value: mut tt, err } = expander.expand(db, id, &macro_arg.0);
+ let Some(macro_arg) = db.macro_arg(id) else {
+ return ExpandResult {
+ value: Arc::new(
+ tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: Vec::new(),
+ },
+ ),
+        // FIXME: We should make sure to enforce an invariant that invalid macro
+        // calls do not reach this call path!
+ err: Some(ExpandError::other(
+ "invalid token tree"
+ )),
+ };
+ };
+ let (arg_tt, arg_tm, undo_info) = &*macro_arg;
+ let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt);
+
+ if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
+ // FIXME: We should report both errors!
+ err = error.clone().or(err);
+ }
+
// Set a hard limit for the expanded tt
let count = tt.count();
if TOKEN_LIMIT.check(count).is_err() {
- return ExpandResult::only_err(ExpandError::Other(
- format!(
+ return ExpandResult {
+ value: Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: vec![],
+ }),
+ err: Some(ExpandError::other(format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
count,
TOKEN_LIMIT.inner(),
- )
- .into(),
- ));
+ ))),
+ };
}
- fixup::reverse_fixups(&mut tt, &macro_arg.1, &macro_arg.2);
+ fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
- ExpandResult { value: Some(Arc::new(tt)), err }
+ ExpandResult { value: Arc::new(tt), err }
}
-fn macro_expand_error(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
- db.macro_expand(macro_call).err
+fn parse_macro_expansion_error(
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+) -> ExpandResult<Box<[SyntaxError]>> {
+ db.parse_macro_expansion(MacroFile { macro_call_id })
+ .map(|it| it.0.errors().to_vec().into_boxed_slice())
}
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
- let macro_arg = match db.macro_arg(id) {
- Some(it) => it,
- None => {
- return ExpandResult::with_err(
- tt::Subtree::empty(),
- ExpandError::Other("No arguments for proc-macro".into()),
- )
- }
+ let loc = db.lookup_intern_macro_call(id);
+ let Some(macro_arg) = db.macro_arg(id) else {
+ return ExpandResult {
+ value: tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: Vec::new(),
+ },
+ err: Some(ExpandError::other(
+ "invalid token tree"
+ )),
+ };
};
let expander = match loc.def.kind {
@@ -512,8 +547,7 @@ fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFram
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
- loc.kind.expand_to()
+ db.lookup_intern_macro_call(id).expand_to()
}
fn token_tree_to_syntax_node(
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index aca41b11f..7ee3fd375 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -18,10 +18,9 @@
//!
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
-use std::sync::Arc;
-
use base_db::CrateId;
-use syntax::{ted, SyntaxNode};
+use syntax::{ted, Parse, SyntaxNode};
+use triomphe::Arc;
use crate::{
ast::{self, AstNode},
@@ -32,143 +31,77 @@ use crate::{
MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
};
-#[derive(Debug)]
-pub struct ErrorEmitted {
- _private: (),
-}
-
-pub trait ErrorSink {
- fn emit(&mut self, err: ExpandError);
-
- fn option<T>(
- &mut self,
- opt: Option<T>,
- error: impl FnOnce() -> ExpandError,
- ) -> Result<T, ErrorEmitted> {
- match opt {
- Some(it) => Ok(it),
- None => {
- self.emit(error());
- Err(ErrorEmitted { _private: () })
- }
- }
- }
-
- fn option_with<T>(
- &mut self,
- opt: impl FnOnce() -> Option<T>,
- error: impl FnOnce() -> ExpandError,
- ) -> Result<T, ErrorEmitted> {
- self.option(opt(), error)
- }
-
- fn result<T>(&mut self, res: Result<T, ExpandError>) -> Result<T, ErrorEmitted> {
- match res {
- Ok(it) => Ok(it),
- Err(e) => {
- self.emit(e);
- Err(ErrorEmitted { _private: () })
- }
- }
- }
-
- fn expand_result_option<T>(&mut self, res: ExpandResult<Option<T>>) -> Result<T, ErrorEmitted> {
- match (res.value, res.err) {
- (None, Some(err)) => {
- self.emit(err);
- Err(ErrorEmitted { _private: () })
- }
- (Some(value), opt_err) => {
- if let Some(err) = opt_err {
- self.emit(err);
- }
- Ok(value)
- }
- (None, None) => unreachable!("`ExpandResult` without value or error"),
- }
- }
-}
-
-impl ErrorSink for &'_ mut dyn FnMut(ExpandError) {
- fn emit(&mut self, err: ExpandError) {
- self(err);
- }
-}
-
-pub fn expand_eager_macro(
+pub fn expand_eager_macro_input(
db: &dyn ExpandDatabase,
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
- diagnostic_sink: &mut dyn FnMut(ExpandError),
-) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
- let hygiene = Hygiene::new(db, macro_call.file_id);
- let parsed_args = macro_call
- .value
- .token_tree()
- .map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0)
- .unwrap_or_else(tt::Subtree::empty);
+) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
+ assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
+ let token_tree = macro_call.value.token_tree();
+
+ let Some(token_tree) = token_tree else {
+ return Ok(ExpandResult { value: None, err:
+ Some(ExpandError::other(
+ "invalid token tree"
+ )),
+ });
+ };
+ let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
let ast_map = db.ast_id_map(macro_call.file_id);
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
let expand_to = ExpandTo::from_call_site(&macro_call.value);
// Note:
- // When `lazy_expand` is called, its *parent* file must be already exists.
- // Here we store an eager macro id for the argument expanded subtree here
+ // When `lazy_expand` is called, its *parent* file must already exist.
+ // Here we store an eager macro id for the argument expanded subtree
// for that purpose.
let arg_id = db.intern_macro_call(MacroCallLoc {
def,
krate,
- eager: Some(EagerCallInfo {
- arg_or_expansion: Arc::new(parsed_args.clone()),
- included_file: None,
- }),
+ eager: Some(Box::new(EagerCallInfo {
+ arg: Arc::new((parsed_args, arg_token_map)),
+ arg_id: None,
+ error: None,
+ })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
});
-
- let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr).0;
- let result = match eager_macro_recur(
+ let arg_as_expr = match db.macro_arg_text(arg_id) {
+ Some(it) => it,
+ None => {
+ return Ok(ExpandResult {
+ value: None,
+ err: Some(ExpandError::other("invalid token tree")),
+ })
+ }
+ };
+ let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
db,
- &hygiene,
- InFile::new(arg_id.as_file(), parsed_args.syntax_node()),
+ &Hygiene::new(db, macro_call.file_id),
+ InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
krate,
resolver,
- diagnostic_sink,
- ) {
- Ok(Ok(it)) => it,
- Ok(Err(err)) => return Ok(Err(err)),
- Err(err) => return Err(err),
+ )?;
+ let Some(expanded_eager_input) = expanded_eager_input else {
+ return Ok(ExpandResult { value: None, err })
};
- let subtree = to_subtree(&result);
-
- if let MacroDefKind::BuiltInEager(eager, _) = def.kind {
- let res = eager.expand(db, arg_id, &subtree);
- if let Some(err) = res.err {
- diagnostic_sink(err);
- }
-
- let loc = MacroCallLoc {
- def,
- krate,
- eager: Some(EagerCallInfo {
- arg_or_expansion: Arc::new(res.value.subtree),
- included_file: res.value.included_file,
- }),
- kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
- };
+ let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
+ subtree.delimiter = crate::tt::Delimiter::unspecified();
- Ok(Ok(db.intern_macro_call(loc)))
- } else {
- panic!("called `expand_eager_macro` on non-eager macro def {def:?}");
- }
-}
+ let loc = MacroCallLoc {
+ def,
+ krate,
+ eager: Some(Box::new(EagerCallInfo {
+ arg: Arc::new((subtree, token_map)),
+ arg_id: Some(arg_id),
+ error: err.clone(),
+ })),
+ kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+ };
-fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree {
- let mut subtree = mbe::syntax_node_to_token_tree(node).0;
- subtree.delimiter = crate::tt::Delimiter::unspecified();
- subtree
+ Ok(ExpandResult { value: Some(db.intern_macro_call(loc)), err })
}
fn lazy_expand(
@@ -176,7 +109,7 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
-) -> ExpandResult<Option<InFile<SyntaxNode>>> {
+) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -186,10 +119,9 @@ fn lazy_expand(
MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
);
- let err = db.macro_expand_error(id);
- let value = db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node));
+ let macro_file = id.as_macro_file();
- ExpandResult { value, err }
+ db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
}
fn eager_macro_recur(
@@ -198,69 +130,83 @@ fn eager_macro_recur(
curr: InFile<SyntaxNode>,
krate: CrateId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
- mut diagnostic_sink: &mut dyn FnMut(ExpandError),
-) -> Result<Result<SyntaxNode, ErrorEmitted>, UnresolvedMacro> {
+) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
let original = curr.value.clone_for_update();
let children = original.descendants().filter_map(ast::MacroCall::cast);
let mut replacements = Vec::new();
+ // Note: We only report a single error inside of eager expansions
+ let mut error = None;
+
// Collect replacement
for child in children {
let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
None => {
- diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
+ error = Some(ExpandError::other("malformed macro invocation"));
continue;
}
};
- let insert = match def.kind {
+ let ExpandResult { value, err } = match def.kind {
MacroDefKind::BuiltInEager(..) => {
- let id = match expand_eager_macro(
+ let ExpandResult { value, err } = match expand_eager_macro_input(
db,
krate,
curr.with_value(child.clone()),
def,
macro_resolver,
- diagnostic_sink,
) {
- Ok(Ok(it)) => it,
- Ok(Err(err)) => return Ok(Err(err)),
+ Ok(it) => it,
Err(err) => return Err(err),
};
- db.parse_or_expand(id.as_file())
- .expect("successful macro expansion should be parseable")
- .clone_for_update()
+ match value {
+ Some(call) => {
+ let ExpandResult { value, err: err2 } =
+ db.parse_macro_expansion(call.as_macro_file());
+ ExpandResult {
+ value: Some(value.0.syntax_node().clone_for_update()),
+ err: err.or(err2),
+ }
+ }
+ None => ExpandResult { value: None, err },
+ }
}
MacroDefKind::Declarative(_)
| MacroDefKind::BuiltIn(..)
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
- let res = lazy_expand(db, &def, curr.with_value(child.clone()), krate);
- let val = match diagnostic_sink.expand_result_option(res) {
- Ok(it) => it,
- Err(err) => return Ok(Err(err)),
- };
+ let ExpandResult { value, err } =
+ lazy_expand(db, &def, curr.with_value(child.clone()), krate);
// replace macro inside
- let hygiene = Hygiene::new(db, val.file_id);
- match eager_macro_recur(db, &hygiene, val, krate, macro_resolver, diagnostic_sink) {
- Ok(Ok(it)) => it,
- Ok(Err(err)) => return Ok(Err(err)),
- Err(err) => return Err(err),
- }
+ let hygiene = Hygiene::new(db, value.file_id);
+ let ExpandResult { value, err: error } = eager_macro_recur(
+ db,
+ &hygiene,
+ // FIXME: We discard parse errors here
+ value.map(|it| it.syntax_node()),
+ krate,
+ macro_resolver,
+ )?;
+ let err = err.or(error);
+ ExpandResult { value, err }
}
};
-
+ if err.is_some() {
+ error = err;
+ }
// check if the whole original syntax is replaced
if child.syntax() == &original {
- return Ok(Ok(insert));
+ return Ok(ExpandResult { value, err: error });
}
- replacements.push((child, insert));
+ if let Some(insert) = value {
+ replacements.push((child, insert));
+ }
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
- Ok(Ok(original))
+ Ok(ExpandResult { value: Some(original), err: error })
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index b273f2176..00796e7c0 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -14,7 +14,7 @@ use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
-#[derive(Debug)]
+#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
@@ -24,7 +24,7 @@ pub(crate) struct SyntaxFixups {
}
/// This is the information needed to reverse the fixups.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
original: Vec<Subtree>,
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index 2eb56fc9e..10f8fe9ce 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -2,8 +2,6 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
-use std::sync::Arc;
-
use base_db::CrateId;
use db::TokenExpander;
use either::Either;
@@ -12,6 +10,7 @@ use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
+use triomphe::Arc;
use crate::{
db::{self, ExpandDatabase},
@@ -200,8 +199,14 @@ fn make_hygiene_info(
});
let macro_def = db.macro_def(loc.def).ok()?;
- let (_, exp_map) = db.parse_macro_expansion(macro_file).value?;
- let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+ let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+ Arc::new((
+ tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+ Default::default(),
+ Default::default(),
+ ))
+ });
Some(HygieneInfo {
file: macro_file,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 5e99eacc1..e0c199328 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -20,11 +20,13 @@ pub mod mod_path;
pub mod attrs;
mod fixup;
+use mbe::TokenMap;
pub use mbe::{Origin, ValueResult};
use ::tt::token_id as tt;
+use triomphe::Arc;
-use std::{fmt, hash::Hash, iter, sync::Arc};
+use std::{fmt, hash::Hash, iter};
use base_db::{
impl_intern_key,
@@ -51,12 +53,18 @@ use crate::{
pub type ExpandResult<T> = ValueResult<T, ExpandError>;
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
UnresolvedProcMacro(CrateId),
Mbe(mbe::ExpandError),
- RecursionOverflowPosioned,
- Other(Box<str>),
+ RecursionOverflowPoisoned,
+ Other(Box<Box<str>>),
+}
+
+impl ExpandError {
+ pub fn other(msg: impl Into<Box<str>>) -> Self {
+ ExpandError::Other(Box::new(msg.into()))
+ }
}
impl From<mbe::ExpandError> for ExpandError {
@@ -70,7 +78,7 @@ impl fmt::Display for ExpandError {
match self {
ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
ExpandError::Mbe(it) => it.fmt(f),
- ExpandError::RecursionOverflowPosioned => {
+ ExpandError::RecursionOverflowPoisoned => {
f.write_str("overflow expanding the original macro")
}
ExpandError::Other(it) => f.write_str(it),
@@ -95,9 +103,15 @@ impl fmt::Display for ExpandError {
/// The two variants are encoded in a single u32 which are differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct HirFileId(u32);
+impl fmt::Debug for HirFileId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.repr().fmt(f)
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile {
pub macro_call_id: MacroCallId,
@@ -113,7 +127,8 @@ impl_intern_key!(MacroCallId);
pub struct MacroCallLoc {
pub def: MacroDefId,
pub(crate) krate: CrateId,
- eager: Option<EagerCallInfo>,
+ /// Some if `def` is a builtin eager macro.
+ eager: Option<Box<EagerCallInfo>>,
pub kind: MacroCallKind,
}
@@ -138,8 +153,11 @@ pub enum MacroDefKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
/// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
- arg_or_expansion: Arc<tt::Subtree>,
- included_file: Option<FileId>,
+ arg: Arc<(tt::Subtree, TokenMap)>,
+    /// Call id of the eager macro's input file. If this is `None`, the macro call containing
+    /// this call info is an eager macro's input; otherwise it is its output.
+ arg_id: Option<MacroCallId>,
+ error: Option<ExpandError>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -166,8 +184,6 @@ pub enum MacroCallKind {
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
invoc_attr_index: AttrId,
- /// Whether this attribute is the `#[derive]` attribute.
- is_derive: bool,
},
}
@@ -205,10 +221,15 @@ impl HirFileId {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- file_id = match loc.eager {
- Some(EagerCallInfo { included_file: Some(file), .. }) => file.into(),
+ let is_include_expansion = loc.def.is_include()
+ && matches!(
+ loc.eager.as_deref(),
+ Some(EagerCallInfo { arg_id: Some(_), .. })
+ );
+ file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
+ Some(Ok((_, file))) => file.into(),
_ => loc.kind.file_id(),
- };
+ }
}
}
}
@@ -230,18 +251,17 @@ impl HirFileId {
pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- Some(loc.kind.to_node(db))
+ Some(loc.to_node(db))
}
/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
- let mut call =
- db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db);
+ let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop {
match call.file_id.repr() {
HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
- call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db);
+ call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
}
@@ -255,8 +275,14 @@ impl HirFileId {
let arg_tt = loc.kind.arg(db)?;
let macro_def = db.macro_def(loc.def).ok()?;
- let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
- let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+ Arc::new((
+ tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+ Default::default(),
+ Default::default(),
+ ))
+ });
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
@@ -298,7 +324,7 @@ impl HirFileId {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {
- MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db),
+ MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
_ => return None,
};
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
@@ -319,7 +345,17 @@ impl HirFileId {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. }))
+ loc.def.is_include()
+ }
+ _ => false,
+ }
+ }
+
+ pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
+ match self.macro_file() {
+ Some(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
}
_ => false,
}
@@ -342,7 +378,7 @@ impl HirFileId {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. })
+ loc.def.is_attribute_derive()
}
None => false,
}
@@ -413,22 +449,19 @@ impl MacroDefId {
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
)
}
-}
-// FIXME: attribute indices do not account for nested `cfg_attr`
+ pub fn is_attribute_derive(&self) -> bool {
+ matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
+ }
-impl MacroCallKind {
- /// Returns the file containing the macro invocation.
- fn file_id(&self) -> HirFileId {
- match *self {
- MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
- | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
- | MacroCallKind::Attr { ast_id: InFile { file_id, .. }, .. } => file_id,
- }
+ pub fn is_include(&self) -> bool {
+ matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include())
}
+}
+impl MacroCallLoc {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
- match self {
+ match self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
@@ -444,23 +477,49 @@ impl MacroCallKind {
.unwrap_or_else(|| it.syntax().clone())
})
}
- MacroCallKind::Attr { ast_id, is_derive: true, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(|it| match it {
- Either::Left(attr) => Some(attr.syntax().clone()),
- Either::Right(_) => None,
- })
- .unwrap_or_else(|| it.syntax().clone())
- })
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ if self.def.is_attribute_derive() {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(invoc_attr_index.ast_index())
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ } else {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
}
- MacroCallKind::Attr { ast_id, .. } => {
- ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ }
+
+ fn expand_to(&self) -> ExpandTo {
+ match self.kind {
+ MacroCallKind::FnLike { expand_to, .. } => expand_to,
+ MacroCallKind::Derive { .. } => ExpandTo::Items,
+ MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } => {
+ // is this always correct?
+ ExpandTo::Items
}
}
}
+}
+
+// FIXME: attribute indices do not account for nested `cfg_attr`
+
+impl MacroCallKind {
+ /// Returns the file containing the macro invocation.
+ fn file_id(&self) -> HirFileId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Attr { ast_id: InFile { file_id, .. }, .. } => file_id,
+ }
+ }
/// Returns the original file range that best describes the location of this macro call.
///
@@ -538,21 +597,16 @@ impl MacroCallKind {
MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
}
}
-
- fn expand_to(&self) -> ExpandTo {
- match self {
- MacroCallKind::FnLike { expand_to, .. } => *expand_to,
- MacroCallKind::Derive { .. } => ExpandTo::Items,
- MacroCallKind::Attr { is_derive: true, .. } => ExpandTo::Statements,
- MacroCallKind::Attr { .. } => ExpandTo::Items, // is this always correct?
- }
- }
}
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFile { macro_call_id: self }.into()
}
+
+ pub fn as_macro_file(self) -> MacroFile {
+ MacroFile { macro_call_id: self }
+ }
}
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
@@ -610,7 +664,7 @@ impl ExpansionInfo {
let token_range = token.value.text_range();
match &loc.kind {
- MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => {
+ MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
let attr = item
.doc_comments_and_attrs()
@@ -626,7 +680,8 @@ impl ExpansionInfo {
token.value.text_range().checked_sub(attr_input_start)?;
// shift by the item's tree's max id
let token_id = attr_args.1.token_by_range(relative_range)?;
- let token_id = if *is_derive {
+
+ let token_id = if loc.def.is_attribute_derive() {
// we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
token_id
} else {
@@ -645,7 +700,7 @@ impl ExpansionInfo {
let token_id = match token_id_in_attr_input {
Some(token_id) => token_id,
- // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
+            // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
None => {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
@@ -677,20 +732,35 @@ impl ExpansionInfo {
let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
+ // Special case: map tokens from `include!` expansions to the included file
+ if loc.def.is_include()
+ && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
+ {
+ if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
+ let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
+ let source = db.parse(file_id);
+
+ let token = source.syntax_node().covering_element(range).into_token()?;
+
+ return Some((InFile::new(file_id.into(), token), Origin::Call));
+ }
+ }
+
// Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, is_derive: true, .. } => {
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- }
MacroCallKind::Attr { attr_args, .. } => {
- // try unshifting the the token id, if unshifting fails, the token resides in the non-item attribute input
- // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
- match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ if loc.def.is_attribute_derive() {
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ } else {
+ // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input
+ // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
+ match self.macro_arg_shift.unshift(token_id) {
+ Some(unshifted) => {
+ token_id = unshifted;
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ None => (&self.macro_arg.1, self.arg.clone()),
}
- None => (&self.macro_arg.1, self.arg.clone()),
}
}
_ => match origin {
@@ -718,7 +788,7 @@ pub type AstId<N> = InFile<FileAstId<N>>;
impl<N: AstNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
- let root = db.parse_or_expand(self.file_id).unwrap();
+ let root = db.parse_or_expand(self.file_id);
db.ast_id_map(self.file_id).get(self.value).to_node(&root)
}
}
@@ -754,7 +824,7 @@ impl<T> InFile<T> {
}
pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
- db.parse_or_expand(self.file_id).expect("source created from invalid file")
+ db.parse_or_expand(self.file_id)
}
}
@@ -950,6 +1020,7 @@ fn ascend_node_border_tokens(
let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
+    // FIXME: Once the token map rewrite is done, this shouldn't need to rely on syntax nodes and tokens anymore
let first = first_token(node)?;
let last = last_token(node)?;
let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
@@ -977,6 +1048,7 @@ impl<N: AstNode> InFile<N> {
self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
}
+ // FIXME: this should return `Option<InFileNotHirFile<N>>`
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index e9393cc89..47a8ab7de 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -1,7 +1,7 @@
//! A lowering for `use`-paths (more generally, paths without angle-bracketed segments).
use std::{
- fmt::{self, Display},
+ fmt::{self, Display as _},
iter,
};
@@ -24,6 +24,12 @@ pub struct ModPath {
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct UnescapedModPath<'a>(&'a ModPath);
+impl<'a> UnescapedModPath<'a> {
+ pub fn display(&'a self, db: &'a dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+ UnescapedDisplay { db, path: self }
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PathKind {
Plain,
@@ -110,52 +116,30 @@ impl ModPath {
UnescapedModPath(self)
}
- fn _fmt(&self, f: &mut fmt::Formatter<'_>, escaped: bool) -> fmt::Result {
- let mut first_segment = true;
- let mut add_segment = |s| -> fmt::Result {
- if !first_segment {
- f.write_str("::")?;
- }
- first_segment = false;
- f.write_str(s)?;
- Ok(())
- };
- match self.kind {
- PathKind::Plain => {}
- PathKind::Super(0) => add_segment("self")?,
- PathKind::Super(n) => {
- for _ in 0..n {
- add_segment("super")?;
- }
- }
- PathKind::Crate => add_segment("crate")?,
- PathKind::Abs => add_segment("")?,
- PathKind::DollarCrate(_) => add_segment("$crate")?,
- }
- for segment in &self.segments {
- if !first_segment {
- f.write_str("::")?;
- }
- first_segment = false;
- if escaped {
- segment.fmt(f)?
- } else {
- segment.unescaped().fmt(f)?
- };
- }
- Ok(())
+ pub fn display<'a>(&'a self, db: &'a dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+ Display { db, path: self }
}
}
-impl Display for ModPath {
+struct Display<'a> {
+ db: &'a dyn ExpandDatabase,
+ path: &'a ModPath,
+}
+
+impl<'a> fmt::Display for Display<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self._fmt(f, true)
+ display_fmt_path(self.db, self.path, f, true)
}
}
-impl<'a> Display for UnescapedModPath<'a> {
+struct UnescapedDisplay<'a> {
+ db: &'a dyn ExpandDatabase,
+ path: &'a UnescapedModPath<'a>,
+}
+
+impl<'a> fmt::Display for UnescapedDisplay<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0._fmt(f, false)
+ display_fmt_path(self.db, self.path.0, f, false)
}
}
@@ -164,6 +148,46 @@ impl From<Name> for ModPath {
ModPath::from_segments(PathKind::Plain, iter::once(name))
}
}
+fn display_fmt_path(
+ db: &dyn ExpandDatabase,
+ path: &ModPath,
+ f: &mut fmt::Formatter<'_>,
+ escaped: bool,
+) -> fmt::Result {
+ let mut first_segment = true;
+ let mut add_segment = |s| -> fmt::Result {
+ if !first_segment {
+ f.write_str("::")?;
+ }
+ first_segment = false;
+ f.write_str(s)?;
+ Ok(())
+ };
+ match path.kind {
+ PathKind::Plain => {}
+ PathKind::Super(0) => add_segment("self")?,
+ PathKind::Super(n) => {
+ for _ in 0..n {
+ add_segment("super")?;
+ }
+ }
+ PathKind::Crate => add_segment("crate")?,
+ PathKind::Abs => add_segment("")?,
+ PathKind::DollarCrate(_) => add_segment("$crate")?,
+ }
+ for segment in &path.segments {
+ if !first_segment {
+ f.write_str("::")?;
+ }
+ first_segment = false;
+ if escaped {
+ segment.display(db).fmt(f)?;
+ } else {
+ segment.unescaped().display(db).fmt(f)?;
+ }
+ }
+ Ok(())
+}
fn convert_path(
db: &dyn ExpandDatabase,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index c3462beac..f8dbb8427 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -24,27 +24,6 @@ enum Repr {
TupleField(usize),
}
-impl fmt::Display for Name {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match &self.0 {
- Repr::Text(text) => fmt::Display::fmt(&text, f),
- Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
- }
- }
-}
-
-impl<'a> fmt::Display for UnescapedName<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match &self.0 .0 {
- Repr::Text(text) => {
- let text = text.strip_prefix("r#").unwrap_or(text);
- fmt::Display::fmt(&text, f)
- }
- Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
- }
- }
-}
-
impl<'a> UnescapedName<'a> {
/// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over
/// [`ToString::to_string`] if possible as this conversion is cheaper in the general case.
@@ -60,6 +39,11 @@ impl<'a> UnescapedName<'a> {
Repr::TupleField(it) => SmolStr::new(it.to_string()),
}
}
+
+ pub fn display(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+ _ = db;
+ UnescapedDisplay { name: self }
+ }
}
impl Name {
@@ -78,7 +62,7 @@ impl Name {
Self::new_text(lt.text().into())
}
- /// Shortcut to create inline plain text name
+ /// Shortcut to create inline plain text name. Panics if `text.len() > 22`
const fn new_inline(text: &str) -> Name {
Name::new_text(SmolStr::new_inline(text))
}
@@ -112,6 +96,17 @@ impl Name {
Name::new_inline("[missing name]")
}
+ /// Generates a new name which is only equal to itself, by incrementing a counter. Due
+ /// its implementation, it should not be used in things that salsa considers, like
+ /// type names or field names, and it should be only used in names of local variables
+ /// and labels and similar things.
+ pub fn generate_new_name() -> Name {
+ use std::sync::atomic::{AtomicUsize, Ordering};
+ static CNT: AtomicUsize = AtomicUsize::new(0);
+ let c = CNT.fetch_add(1, Ordering::Relaxed);
+ Name::new_text(format!("<ra@gennew>{c}").into())
+ }
+
/// Returns the tuple index this name represents if it is a tuple field.
pub fn as_tuple_index(&self) -> Option<usize> {
match self.0 {
@@ -156,6 +151,40 @@ impl Name {
Repr::TupleField(_) => false,
}
}
+
+ pub fn display<'a>(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+ _ = db;
+ Display { name: self }
+ }
+}
+
+struct Display<'a> {
+ name: &'a Name,
+}
+
+impl<'a> fmt::Display for Display<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.name.0 {
+ Repr::Text(text) => fmt::Display::fmt(&text, f),
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+struct UnescapedDisplay<'a> {
+ name: &'a UnescapedName<'a>,
+}
+
+impl<'a> fmt::Display for UnescapedDisplay<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.name.0 .0 {
+ Repr::Text(text) => {
+ let text = text.strip_prefix("r#").unwrap_or(text);
+ fmt::Display::fmt(&text, f)
+ }
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
}
pub trait AsName {
@@ -337,18 +366,24 @@ pub mod known {
crate_type,
derive,
global_allocator,
+ no_core,
+ no_std,
test,
test_case,
recursion_limit,
feature,
// known methods of lang items
call_once,
+ call_mut,
+ call,
eq,
ne,
ge,
gt,
le,
lt,
+ // known fields of lang items
+ pieces,
// lang items
add_assign,
add,
@@ -363,6 +398,7 @@ pub mod known {
deref,
div_assign,
div,
+ drop,
fn_mut,
fn_once,
future_trait,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index d758e9302..41675c630 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -7,20 +7,23 @@ use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ProcMacroExpander {
- proc_macro_id: Option<ProcMacroId>,
+ proc_macro_id: ProcMacroId,
}
+const DUMMY_ID: u32 = !0;
+
impl ProcMacroExpander {
pub fn new(proc_macro_id: ProcMacroId) -> Self {
- Self { proc_macro_id: Some(proc_macro_id) }
+ assert_ne!(proc_macro_id.0, DUMMY_ID);
+ Self { proc_macro_id }
}
pub fn dummy() -> Self {
- Self { proc_macro_id: None }
+ Self { proc_macro_id: ProcMacroId(DUMMY_ID) }
}
pub fn is_dummy(&self) -> bool {
- self.proc_macro_id.is_none()
+ self.proc_macro_id.0 == DUMMY_ID
}
pub fn expand(
@@ -32,33 +35,37 @@ impl ProcMacroExpander {
attr_arg: Option<&tt::Subtree>,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
- Some(id) => {
- let krate_graph = db.crate_graph();
- let proc_macros = match &krate_graph[def_crate].proc_macro {
- Ok(proc_macros) => proc_macros,
- Err(_) => {
+ ProcMacroId(DUMMY_ID) => {
+ ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
+ }
+ ProcMacroId(id) => {
+ let proc_macros = db.proc_macros();
+ let proc_macros = match proc_macros.get(&def_crate) {
+ Some(Ok(proc_macros)) => proc_macros,
+ Some(Err(_)) | None => {
never!("Non-dummy expander even though there are no proc macros");
- return ExpandResult::with_err(
+ return ExpandResult::new(
tt::Subtree::empty(),
- ExpandError::Other("Internal error".into()),
+ ExpandError::other("Internal error"),
);
}
};
- let proc_macro = match proc_macros.get(id.0 as usize) {
+ let proc_macro = match proc_macros.get(id as usize) {
Some(proc_macro) => proc_macro,
None => {
never!(
"Proc macro index out of bounds: the length is {} but the index is {}",
proc_macros.len(),
- id.0
+ id
);
- return ExpandResult::with_err(
+ return ExpandResult::new(
tt::Subtree::empty(),
- ExpandError::Other("Internal error".into()),
+ ExpandError::other("Internal error"),
);
}
};
+ let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
match proc_macro.expander.expand(tt, attr_arg, env) {
@@ -68,23 +75,15 @@ impl ProcMacroExpander {
ProcMacroExpansionError::System(text)
if proc_macro.kind == ProcMacroKind::Attr =>
{
- ExpandResult {
- value: tt.clone(),
- err: Some(ExpandError::Other(text.into())),
- }
+ ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
- | ProcMacroExpansionError::Panic(text) => ExpandResult::with_err(
- tt::Subtree::empty(),
- ExpandError::Other(text.into()),
- ),
+ | ProcMacroExpansionError::Panic(text) => {
+ ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
+ }
},
}
}
- None => ExpandResult::with_err(
- tt::Subtree::empty(),
- ExpandError::UnresolvedProcMacro(def_crate),
- ),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
index 63586f9da..ab3809abc 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -162,6 +162,12 @@ impl ToTokenTree for crate::tt::TokenTree {
}
}
+impl ToTokenTree for &crate::tt::TokenTree {
+ fn to_token(self) -> crate::tt::TokenTree {
+ self.clone()
+ }
+}
+
impl ToTokenTree for crate::tt::Subtree {
fn to_token(self) -> crate::tt::TokenTree {
self.into()