path: root/src/tools/rust-analyzer/crates/hir-expand
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir-expand')
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/Cargo.toml                    6
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs            65
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs                195
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs    39
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs 460
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs     246
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/db.rs                   716
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/eager.rs                189
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/files.rs                375
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs                376
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs              423
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/lib.rs                 1027
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs              85
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/name.rs                   1
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs            26
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/quote.rs                170
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/span.rs                 124
17 files changed, 2456 insertions, 2067 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 1f27204c1..361bbec43 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -13,11 +13,11 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-tracing = "0.1.35"
-either = "1.7.0"
+tracing.workspace = true
+either.workspace = true
rustc-hash = "1.1.0"
la-arena.workspace = true
-itertools = "0.10.5"
+itertools.workspace = true
hashbrown.workspace = true
smallvec.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
index 1906ed15b..be0b72f9d 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -12,11 +12,40 @@ use std::{
marker::PhantomData,
};
-use la_arena::{Arena, Idx};
+use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use crate::db;
+
+pub use base_db::span::ErasedFileAstId;
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = crate::InFile<FileAstId<N>>;
+
+impl<N: AstIdNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
+ crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+ db.ast_id_map(self.file_id).get(self.value)
+ }
+}
+
+pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+ db.ast_id_map(self.file_id).get_erased(self.value)
+ }
+}
+
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
-pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@@ -99,7 +126,7 @@ register_ast_id_node! {
TraitAlias,
TypeAlias,
Use,
- AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+ AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg, Param, SelfParam
}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
@@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
+
+ // make sure to allocate the root node
+ if !should_alloc_id(node.kind()) {
+ res.alloc(node);
+ }
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
@@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| {
if should_alloc_id(it.kind()) {
res.alloc(&it);
- true
+ TreeOrder::BreadthFirst
} else {
- false
+ TreeOrder::DepthFirst
}
});
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@@ -155,6 +187,11 @@ impl AstIdMap {
res
}
+ /// The [`AstId`] of the root node
+ pub fn root(&self) -> SyntaxNodePtr {
+ self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
+ }
+
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
- pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+ pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
@@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish()
}
+#[derive(Copy, Clone, PartialEq, Eq)]
+enum TreeOrder {
+ BreadthFirst,
+ DepthFirst,
+}
+
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth-first orders. Nodes for which
-/// `f` returns true are visited breadth-first, all the other nodes are explored
-/// depth-first.
+/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
+/// [`TreeOrder::DepthFirst`].
///
/// In other words, the size of the bfs queue is bound by the number of "true"
/// nodes.
-fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
+fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
@@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() {
match event {
syntax::WalkEvent::Enter(node) => {
- if f(node.clone()) {
+ if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children());
preorder.skip_subtree();
}
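
The `TreeOrder` change above replaces the traversal callback's `bool` with a named enum. A minimal, self-contained sketch of the same bdfs scheme over a toy tree (this `Node` type is hypothetical; rust-analyzer walks `SyntaxNode`s through `preorder()`):

```rust
#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
    BreadthFirst,
    DepthFirst,
}

struct Node {
    label: &'static str,
    children: Vec<Node>,
}

/// Walks `root` in "bdfs" order: nodes for which `f` returns `BreadthFirst`
/// have their children deferred to the next layer, so parents are always
/// visited (and, in `AstIdMap`, allocated) before their children.
fn bdfs(root: &Node, mut f: impl FnMut(&Node) -> TreeOrder) {
    let mut curr_layer = vec![root];
    let mut next_layer = Vec::new();
    while !curr_layer.is_empty() {
        for node in curr_layer.drain(..) {
            // Depth-first walk of this subtree; `BreadthFirst` nodes cut the
            // walk short and push their children onto the next layer instead.
            let mut stack = vec![node];
            while let Some(n) = stack.pop() {
                if f(n) == TreeOrder::BreadthFirst {
                    next_layer.extend(n.children.iter());
                } else {
                    stack.extend(n.children.iter().rev());
                }
            }
        }
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}

fn main() {
    let tree = Node {
        label: "source_file",
        children: vec![
            Node { label: "fn a", children: vec![] },
            Node { label: "mod b", children: vec![Node { label: "fn c", children: vec![] }] },
        ],
    };
    // Prints source_file, fn a, mod b, fn c: layer by layer.
    bdfs(&tree, |n| {
        println!("visit {}", n.label);
        TreeOrder::BreadthFirst
    });
}
```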
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index 0ec2422b3..b8fc30c91 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,19 +1,19 @@
//! Higher-level attributes based on TokenTree, with some shortcuts.
use std::{fmt, ops};
-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
-use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
db::ExpandDatabase,
- hygiene::Hygiene,
mod_path::ModPath,
+ span::SpanMapRef,
tt::{self, Subtree},
InFile,
};
@@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
- pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
- let entries = collect_attrs(owner)
- .filter_map(|(id, attr)| match attr {
- Either::Left(attr) => {
- attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
- }
- Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
- id,
- input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
- path: Interned::new(ModPath::from(crate::name!(doc))),
- }),
- })
- .collect::<Vec<_>>();
- // FIXME: use `Arc::from_iter` when it becomes available
- let entries: Arc<[Attr]> = Arc::from(entries);
+ pub fn new(
+ db: &dyn ExpandDatabase,
+ owner: &dyn ast::HasAttrs,
+ span_map: SpanMapRef<'_>,
+ ) -> Self {
+ let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
+ Either::Left(attr) => {
+ attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+ }
+ Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+ id,
+ input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+ path: Interned::new(ModPath::from(crate::name!(doc))),
+ ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+ }),
+ });
+ let entries: Arc<[Attr]> = Arc::from_iter(entries);
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
- pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
- let hygiene = Hygiene::new(db, owner.file_id);
- Self::new(db, owner.value, &hygiene)
+ pub fn from_attrs_owner(
+ db: &dyn ExpandDatabase,
+ owner: InFile<&dyn ast::HasAttrs>,
+ span_map: SpanMapRef<'_>,
+ ) -> Self {
+ Self::new(db, owner.value, span_map)
}
pub fn merge(&self, other: Self) -> Self {
@@ -71,19 +76,13 @@ impl RawAttrs {
(Some(a), Some(b)) => {
let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
Self {
- entries: Some(Arc::from(
- a.iter()
- .cloned()
- .chain(b.iter().map(|it| {
- let mut it = it.clone();
- it.id.id = it.id.ast_index() as u32 + last_ast_index
- | (it.id.cfg_attr_index().unwrap_or(0) as u32)
- << AttrId::AST_INDEX_BITS;
- it
- }))
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- )),
+ entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
+ let mut it = it.clone();
+ it.id.id = it.id.ast_index() as u32 + last_ast_index
+ | (it.id.cfg_attr_index().unwrap_or(0) as u32)
+ << AttrId::AST_INDEX_BITS;
+ it
+ })))),
}
}
}
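
The `merge` rewrite above keeps the existing `AttrId` packing: the low bits hold the attribute's index in the AST, the high bits an optional `cfg_attr` sub-index. A sketch of the scheme, with an assumed bit width (the real constants are `AttrId::AST_INDEX_BITS` and `AttrId::CFG_ATTR_BITS`):

```rust
// Assumed width for illustration; rust-analyzer defines the real value
// as `AttrId::AST_INDEX_BITS`.
const AST_INDEX_BITS: u32 = 16;

fn pack(ast_index: u32, cfg_attr_index: u32) -> u32 {
    // `<<` binds tighter than `|`, matching the expression in `merge` above.
    ast_index | cfg_attr_index << AST_INDEX_BITS
}

fn main() {
    let id = pack(3, 2);
    assert_eq!(id & ((1 << AST_INDEX_BITS) - 1), 3); // AST index recovered
    assert_eq!(id >> AST_INDEX_BITS, 2); // cfg_attr sub-index recovered
}
```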
@@ -100,51 +99,43 @@ impl RawAttrs {
}
let crate_graph = db.crate_graph();
- let new_attrs = Arc::from(
- self.iter()
- .flat_map(|attr| -> SmallVec<[_; 1]> {
- let is_cfg_attr =
- attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
- if !is_cfg_attr {
- return smallvec![attr.clone()];
- }
-
- let subtree = match attr.token_tree_value() {
- Some(it) => it,
- _ => return smallvec![attr.clone()],
- };
+ let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
+ let is_cfg_attr =
+ attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+ if !is_cfg_attr {
+ return smallvec![attr.clone()];
+ }
- let (cfg, parts) = match parse_cfg_attr_input(subtree) {
- Some(it) => it,
- None => return smallvec![attr.clone()],
+ let subtree = match attr.token_tree_value() {
+ Some(it) => it,
+ _ => return smallvec![attr.clone()],
+ };
+
+ let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+ Some(it) => it,
+ None => return smallvec![attr.clone()],
+ };
+ let index = attr.id;
+ let attrs =
+ parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
+ let tree = Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: attr.to_vec(),
};
- let index = attr.id;
- let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
- |(idx, attr)| {
- let tree = Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: attr.to_vec(),
- };
- // FIXME hygiene
- let hygiene = Hygiene::new_unhygienic();
- Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
- },
- );
-
- let cfg_options = &crate_graph[krate].cfg_options;
- let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
- let cfg = CfgExpr::parse(&cfg);
- if cfg_options.check(&cfg) == Some(false) {
- smallvec![]
- } else {
- cov_mark::hit!(cfg_attr_active);
-
- attrs.collect()
- }
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
+ });
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+ let cfg = CfgExpr::parse(&cfg);
+ if cfg_options.check(&cfg) == Some(false) {
+ smallvec![]
+ } else {
+ cov_mark::hit!(cfg_attr_active);
+
+ attrs.collect()
+ }
+ }));
RawAttrs { entries: Some(new_attrs) }
}
@@ -185,21 +176,23 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
+ pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
+ // FIXME: This is losing span
Literal(SmolStr),
/// `#[attr(subtree)]`
- TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
+ TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
- AttrInput::TokenTree(tt) => tt.0.fmt(f),
+ AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@@ -208,10 +201,10 @@ impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
ast: ast::Meta,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
- let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
+ let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@@ -219,24 +212,20 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
- let (tree, map) = syntax_node_to_token_tree(tt.syntax());
- Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
+ let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
+ Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
- Some(Attr { id, path, input })
+ Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
}
- fn from_tt(
- db: &dyn ExpandDatabase,
- tt: &tt::Subtree,
- hygiene: &Hygiene,
- id: AttrId,
- ) -> Option<Attr> {
- let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+ fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
+ // FIXME: Unnecessary roundtrip tt -> ast -> tt
+ let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
- Self::from_src(db, ast, hygiene, id)
+ Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
}
pub fn path(&self) -> &ModPath {
@@ -256,7 +245,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
+ AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@@ -267,7 +256,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => Some(&tt.0),
+ AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
@@ -276,8 +265,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
- hygiene: &'a Hygiene,
- ) -> Option<impl Iterator<Item = ModPath> + 'a> {
+ ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -290,12 +278,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
- // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
+ // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
+ // here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: tts.into_iter().cloned().collect(),
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: tts.to_vec(),
};
- let (parse, _) =
+ let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@@ -304,7 +293,11 @@ impl Attr {
return None;
}
let path = meta.path()?;
- ModPath::from_src(db, path, hygiene)
+ let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
+ Some((
+ ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
+ call_site,
+ ))
});
Some(paths)
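
Several hunks in this file replace the `collect::<Vec<_>>()` plus `Arc::from` dance with `Arc::from_iter`, resolving the old FIXME. The `Arc` here is `triomphe::Arc`, but std's `Arc<[T]>` offers the same `FromIterator` impl, for example:

```rust
use std::sync::Arc;

fn main() {
    // `Arc<[T]>` implements `FromIterator`, so `collect()` (or the
    // `Arc::from_iter(..)` spelling used in the diff) builds the shared
    // slice without a named intermediate `Vec`.
    let entries: Arc<[u32]> = (0..4).map(|it| it * 2).collect();
    assert_eq!(&*entries, &[0, 2, 4, 6]);
}
```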
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 4ee12e2f2..de58a495f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -1,16 +1,22 @@
//! Builtin attributes.
+use base_db::{
+ span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+};
+use syntax::{TextRange, TextSize};
+
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
- ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
+ ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander {
$($variant),*
}
impl BuiltinAttrExpander {
- pub fn expand(
+ pub fn $expand_fn(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
}
}
-register_builtin! {
+register_builtin! { expand:
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
@@ -77,9 +83,8 @@ fn dummy_attr_expand(
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
-/// #[Foo]
-/// #[bar::Bar]
-/// ();
+/// #![Foo]
+/// #![bar::Bar]
/// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item.
@@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
- _ => return ExpandResult::ok(tt::Subtree::empty()),
+ MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
+ attr_args
+ }
+ _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
};
- pseudo_derive_attr_expansion(tt, derives)
+ pseudo_derive_attr_expansion(tt, derives, loc.call_site)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
+ call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span: tt::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: call_site,
+ },
}))
};
@@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));
+ token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
- token_trees.push(mk_leaf('('));
- token_trees.push(mk_leaf(')'));
- token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
}
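
Per the updated doc comment, the pseudo-expansion now emits inner attributes (`#![Foo]`) and drops the trailing `();`. A toy sketch of the comma-splitting shape over plain strings (the real code splits `tt::TokenTree`s and stamps every emitted punct with the `call_site` context):

```rust
// Each comma-separated derive path becomes one inner attribute.
fn pseudo_derive(args: &[&str]) -> String {
    args.split(|tt| *tt == ",")
        .map(|path| format!("#![{}]", path.concat()))
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    // The argument tokens of `#[derive(Foo, bar::Bar)]`:
    let args = ["Foo", ",", "bar", "::", "Bar"];
    assert_eq!(pseudo_derive(&args), "#![Foo]\n#![bar::Bar]");
}
```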
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index ecc8b407a..410aa4d28 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -1,16 +1,16 @@
//! Builtin derives.
-use ::tt::Ident;
-use base_db::{CrateOrigin, LangCrateOrigin};
+use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use itertools::izip;
-use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
+ hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
- tt::{self, TokenId},
+ span::SpanMapRef,
+ tt,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
- token_map: &TokenMap,
+ token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
- expander(db, id, tt, token_map)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, span, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -70,19 +73,19 @@ enum VariantShape {
Unit,
}
-fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
- (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
+fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
+ (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
}
impl VariantShape {
- fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
- self.as_pattern_map(path, |it| quote!(#it))
+ fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
+ self.as_pattern_map(path, span, |it| quote!(span => #it))
}
- fn field_names(&self) -> Vec<tt::Ident> {
+ fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
- VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
+ VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
}
@@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map(
&self,
path: tt::Subtree,
+ span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let mapped = field_map(it);
- quote! { #it : #mapped , }
+ quote! {span => #it : #mapped , }
});
- quote! {
+ quote! {span =>
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
- let fields = tuple_field_iterator(n).map(|it| {
+ let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
- quote! {
+ quote! {span =>
#mapped ,
}
});
- quote! {
+ quote! {span =>
#path ( ##fields )
}
}
@@ -117,7 +121,7 @@ impl VariantShape {
}
}
- fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
+ fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@@ -139,17 +143,17 @@ enum AdtShape {
}
impl AdtShape {
- fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
- self.as_pattern_map(name, |it| quote!(#it))
+ fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
+ self.as_pattern_map(name, |it| quote!(span =>#it), span)
}
- fn field_names(&self) -> Vec<Vec<tt::Ident>> {
+ fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self {
AdtShape::Struct(s) => {
- vec![s.field_names()]
+ vec![s.field_names(span)]
}
AdtShape::Enum { variants, .. } => {
- variants.iter().map(|(_, fields)| fields.field_names()).collect()
+ variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
}
AdtShape::Union => {
never!("using fields of union in derive is always wrong");
@@ -162,18 +166,21 @@ impl AdtShape {
&self,
name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+ span: SpanData,
) -> Vec<tt::Subtree> {
match self {
AdtShape::Struct(s) => {
- vec![s.as_pattern_map(quote! { #name }, field_map)]
+ vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
}
AdtShape::Enum { variants, .. } => variants
.iter()
- .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
+ .map(|(v, fields)| {
+ fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
+ })
.collect(),
AdtShape::Union => {
never!("pattern matching on union is always wrong");
- vec![quote! { un }]
+ vec![quote! {span => un }]
}
}
}
@@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
-fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
- let (name, generic_param_list, shape) = match &adt {
+fn parse_adt(
+ tm: SpanMapRef<'_>,
+ adt: &ast::Adt,
+ call_site: SpanData,
+) -> Result<BasicAdtInfo, ExpandError> {
+ let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
- mbe::syntax_node_to_token_tree(it.syntax()).0
+ mbe::syntax_node_to_token_tree(it.syntax(), tm)
+ }
+ None => {
+ tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}
- None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
- it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+ it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
}
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
- .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
- .unwrap_or_else(tt::Subtree::empty);
+ .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
+ .unwrap_or_else(|| {
+ tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+ });
Some(ty)
} else {
None
@@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
- .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+ .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect();
- let name_token = name_to_token(&tm, name)?;
+ let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
-fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
+fn name_to_token(
+ token_map: SpanMapRef<'_>,
+ name: Option<ast::Name>,
+) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
- let name_token_id =
- token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
- let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
+ let span = token_map.span_for_range(name.syntax().text_range());
+ let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
@@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
+ // FIXME: use
+ invoc_span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let info = match parse_adt(tm, tt) {
+ let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
+ e,
+ )
+ }
};
let trait_body = make_trait_body(&info);
let mut where_block = vec![];
@@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone();
if let Some(b) = bound {
let ident = ident.clone();
- where_block.push(quote! { #ident : #b , });
+ where_block.push(quote! {invoc_span => #ident : #b , });
}
if let Some(ty) = param_ty {
- (quote! { const #ident : #ty , }, quote! { #ident_ , })
+ (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else {
let bound = trait_path.clone();
- (quote! { #ident : #bound , }, quote! { #ident_ , })
+ (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
}
})
.unzip();
@@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone();
let bound = trait_path.clone();
- quote! { #it : #bound , }
+ quote! {invoc_span => #it : #bound , }
}));
let name = info.name;
- let expanded = quote! {
+ let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
-fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
+fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene works for builtin derive macro
// such that $crate can be used here.
let cg = db.crate_graph();
@@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core);
- quote! { crate }
+ quote! {span => crate }
} else {
- quote! { core }
+ quote! {span => core }
};
tt.token_trees[0].clone()
@@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn clone(&self) -> Self {
#star self
}
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn clone(&self) -> Self {
match #star self {}
}
};
}
let name = &adt.name;
- let patterns = adt.shape.as_pattern(name);
- let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
+ let patterns = adt.shape.as_pattern(span, name);
+ let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
- let fat_arrow = fat_arrow();
- quote! {
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
#pat #fat_arrow #expr,
}
});
- quote! {
+ quote! {span =>
fn clone(&self) -> Self {
match self {
##arms
@@ -451,53 +469,56 @@ fn clone_expand(
})
}
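
Throughout this file, `quote!` now takes a leading `span =>` argument that is stamped on every token it produces (see the `quote.rs` changes in the diffstat). For reference, what `clone_expand` generates for a simple tuple struct corresponds roughly to this hand-expanded impl (names like `f0` come from `tuple_field_iterator`; `core` is substituted by `find_builtin_crate`):

```rust
struct Wrapper<T>(T);

// Hand-written instance of the derive output sketched above.
impl<T: core::clone::Clone> core::clone::Clone for Wrapper<T> {
    fn clone(&self) -> Self {
        match self {
            Wrapper(f0) => Wrapper(f0.clone()),
        }
    }
}

fn main() {
    let w = Wrapper(42u32).clone();
    assert_eq!(w.0, 42);
}
```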
-/// This function exists since `quote! { => }` doesn't work.
-fn fat_arrow() -> ::tt::Subtree<TokenId> {
- let eq =
- tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
- quote! { #eq> }
+/// This function exists since `quote! {span => => }` doesn't work.
+fn fat_arrow(span: SpanData) -> tt::Subtree {
+ let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
+ quote! {span => #eq> }
}
-/// This function exists since `quote! { && }` doesn't work.
-fn and_and() -> ::tt::Subtree<TokenId> {
- let and =
- tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
- quote! { #and& }
+/// This function exists since `quote! {span => && }` doesn't work.
+fn and_and(span: SpanData) -> tt::Subtree {
+ let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
+ quote! {span => #and& }
}
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
- fields
- .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
+ fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ )
}
AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
- quote!(#adt_name :: #name),
- |_| quote!(#krate::default::Default::default()),
+ quote!(span =>#adt_name :: #name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
)
} else {
// FIXME: Return expand error here
- quote!()
+ quote!(span =>)
}
}
AdtShape::Union => {
// FIXME: Return expand error here
- quote!()
+ quote!(span =>)
}
};
- quote! {
+ quote! {span =>
fn default() -> Self {
#body
}
@@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let x_string = it.to_string();
- quote! {
+ quote! {span =>
.field(#x_string, & #it)
}
});
- quote! {
+ quote! {span =>
f.debug_struct(#name) ##for_fields .finish()
}
}
VariantShape::Tuple(n) => {
- let for_fields = tuple_field_iterator(*n).map(|it| {
- quote! {
+ let for_fields = tuple_field_iterator(span, *n).map(|it| {
+ quote! {span =>
.field( & #it)
}
});
- quote! {
+ quote! {span =>
f.debug_tuple(#name) ##for_fields .finish()
}
}
- VariantShape::Unit => quote! {
+ VariantShape::Unit => quote! {span =>
f.write_str(#name)
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
}
@@ -553,20 +571,20 @@ fn debug_expand(
}
let arms = match &adt.shape {
AdtShape::Struct(fields) => {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let name = &adt.name;
- let pat = fields.as_pattern(quote!(#name));
+ let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields);
- vec![quote! { #pat #fat_arrow #expr }]
+ vec![quote! {span => #pat #fat_arrow #expr }]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(name, v)| {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let adt_name = &adt.name;
- let pat = v.as_pattern(quote!(#adt_name :: #name));
+ let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v);
- quote! {
+ quote! {span =>
#pat #fat_arrow #expr ,
}
})
@@ -576,7 +594,7 @@ fn debug_expand(
vec![]
}
};
- quote! {
+ quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self {
##arms
@@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote! {};
+ return quote! {span =>};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
}
- let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
- |(pat, names)| {
- let expr = {
- let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
- quote! { {
- ##it
- } }
- };
- let fat_arrow = fat_arrow();
- quote! {
- #pat #fat_arrow #expr ,
- }
- },
- );
+ let arms =
+ adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
+ |(pat, names)| {
+ let expr = {
+ let it =
+ names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
+ quote! {span => {
+ ##it
+ } }
+ };
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
+ #pat #fat_arrow #expr ,
+ }
+ },
+ );
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
- quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+ quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else {
- quote! {}
+ quote! {span =>}
};
- quote! {
+ quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant
match self {
@@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote! {};
+ return quote! {span =>};
}
let name = &adt.name;
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let body = match &*names {
[] => {
- quote!(true)
+ quote!(span =>true)
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
- let t1 = Ident::new(format!("{}_self", it.text), it.span);
- let t2 = Ident::new(format!("{}_other", it.text), it.span);
- let and_and = and_and();
- quote!(#and_and #t1 .eq( #t2 ))
+ let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
+ let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
+ let and_and = and_and(span);
+ quote!(span =>#and_and #t1 .eq( #t2 ))
});
let first = {
- let t1 = Ident::new(format!("{}_self", first.text), first.span);
- let t2 = Ident::new(format!("{}_other", first.text), first.span);
- quote!(#t1 .eq( #t2 ))
+ let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
+ let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
+ quote!(span =>#t1 .eq( #t2 ))
};
- quote!(#first ##rest)
+ quote!(span =>#first ##rest)
}
};
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
- quote! {
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
fn eq(&self, other: &Self) -> bool {
match (self, other) {
##arms
@@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
+ span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
- let self_patterns = adt.shape.as_pattern_map(name, |it| {
- let t = Ident::new(format!("{}_self", it.text), it.span);
- quote!(#t)
- });
- let other_patterns = adt.shape.as_pattern_map(name, |it| {
- let t = Ident::new(format!("{}_other", it.text), it.span);
- quote!(#t)
- });
+ let self_patterns = adt.shape.as_pattern_map(
+ name,
+ |it| {
+ let t = tt::Ident::new(format!("{}_self", it.text), it.span);
+ quote!(span =>#t)
+ },
+ span,
+ );
+ let other_patterns = adt.shape.as_pattern_map(
+ name,
+ |it| {
+ let t = tt::Ident::new(format!("{}_other", it.text), it.span);
+ quote!(span =>#t)
+ },
+ span,
+ );
(self_patterns, other_patterns)
}
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
+ span: SpanData,
) -> tt::Subtree {
- let fat_arrow1 = fat_arrow();
- let fat_arrow2 = fat_arrow();
- quote! {
+ let fat_arrow1 = fat_arrow(span);
+ let fat_arrow2 = fat_arrow(span);
+ quote! {span =>
match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 {
#rest
@@ -745,34 +775,34 @@ fn ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote!();
+ return quote!(span =>);
}
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
- let mut body = quote!(#krate::cmp::Ordering::Equal);
+ let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
- let t1 = Ident::new(format!("{}_self", f.text), f.span);
- let t2 = Ident::new(format!("{}_other", f.text), f.span);
- body = compare(krate, quote!(#t1), quote!(#t2), body);
+ let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
- let fat_arrow = fat_arrow();
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ let fat_arrow = fat_arrow(span);
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
- let mut body = quote! {
+ let fat_arrow = fat_arrow(span);
+ let mut body = quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal
}
};
if matches!(&adt.shape, AdtShape::Enum { .. }) {
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
- body = compare(krate, left, right, body);
+ let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+ let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
+ body = compare(krate, left, right, body, span);
}
- quote! {
+ quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
}
@@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
+ span: SpanData,
) -> tt::Subtree {
- let fat_arrow1 = fat_arrow();
- let fat_arrow2 = fat_arrow();
- quote! {
+ let fat_arrow1 = fat_arrow(span);
+ let fat_arrow2 = fat_arrow(span);
+ quote! {span =>
match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest
@@ -807,37 +839,39 @@ fn partial_ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote!();
+ return quote!(span =>);
}
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
+ let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+ let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
- let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
+ let mut body =
+ quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
- let t1 = Ident::new(format!("{}_self", f.text), f.span);
- let t2 = Ident::new(format!("{}_other", f.text), f.span);
- body = compare(krate, quote!(#t1), quote!(#t2), body);
+ let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
- let fat_arrow = fat_arrow();
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ let fat_arrow = fat_arrow(span);
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let body = compare(
krate,
left,
right,
- quote! {
+ quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
}
},
+ span,
);
- quote! {
+ quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body
}
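
To make the generated shape concrete, here is a hand-written sketch of what the `PartialEq` expansion above produces for a small enum: one arm per variant pair, binding `{field}_self`/`{field}_other` and chaining `.eq(..)` with `&&` (the catch-all arm is added here for exhaustiveness):

```rust
enum Shape {
    Unit,
    Tuple(u32, u32),
}

// Hand-expanded sketch of the derive output.
impl PartialEq for Shape {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Shape::Unit, Shape::Unit) => true,
            (Shape::Tuple(f0_self, f1_self), Shape::Tuple(f0_other, f1_other)) => {
                f0_self.eq(f0_other) && f1_self.eq(f1_other)
            }
            _unused => false,
        }
    }
}

fn main() {
    assert!(Shape::Tuple(1, 2) == Shape::Tuple(1, 2));
    assert!(Shape::Unit != Shape::Tuple(0, 0));
}
```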
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index 30b19b6e5..c8f04bfee 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -1,17 +1,24 @@
//! Builtin macros.
-use base_db::{AnchoredPath, Edition, FileId};
+use base_db::{
+ span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ AnchoredPath, Edition, FileId,
+};
use cfg::CfgExpr;
use either::Either;
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
+use itertools::Itertools;
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
- db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
- MacroCallLoc,
+ db::ExpandDatabase,
+ hygiene::span_with_def_site_ctxt,
+ name, quote,
+ tt::{self, DelimSpan},
+ ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@@ -36,7 +43,10 @@ macro_rules! register_builtin {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
- expander(db, id, tt)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, tt, span)
}
}
@@ -44,13 +54,16 @@ macro_rules! register_builtin {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
- arg_id: MacroCallId,
+ id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
- expander(db, arg_id, tt)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, tt, span)
}
}
@@ -78,7 +91,7 @@ pub fn find_builtin_macro(
register_builtin! {
LAZY:
- (column, Column) => column_expand,
+ (column, Column) => line_expand,
(file, File) => file_expand,
(line, Line) => line_expand,
(module_path, ModulePath) => module_path_expand,
@@ -109,99 +122,108 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand
}
-const DOLLAR_CRATE: tt::Ident =
- tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
+fn mk_pound(span: SpanData) -> tt::Subtree {
+ crate::quote::IntoTt::to_subtree(
+ vec![crate::tt::Leaf::Punct(crate::tt::Punct {
+ char: '#',
+ spacing: crate::tt::Spacing::Alone,
+ span,
+ })
+ .into()],
+ span,
+ )
+}
fn module_path_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Just return a dummy result.
- ExpandResult::ok(quote! { "module::path" })
+ ExpandResult::ok(quote! {span =>
+ "module::path"
+ })
}
fn line_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
- let expanded = quote! {
- 0 as u32
- };
-
- ExpandResult::ok(expanded)
+ // Note that `line!` and `column!` will never be implemented properly, as they are by definition
+ // not incremental
+ ExpandResult::ok(tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+ text: "0u32".into(),
+ span,
+ }))],
+ })
}
fn log_syntax_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn trace_macros_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn stringify_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees);
- let expanded = quote! {
+ let expanded = quote! {span =>
#pretty
};
ExpandResult::ok(expanded)
}
-fn column_expand(
- _db: &dyn ExpandDatabase,
- _id: MacroCallId,
- _tt: &tt::Subtree,
-) -> ExpandResult<tt::Subtree> {
- // dummy implementation for type-checking purposes
- let expanded = quote! {
- 0 as u32
- };
-
- ExpandResult::ok(expanded)
-}
-
fn assert_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ',');
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span,
}))],
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
- quote! {{
+ quote! {span =>{
if !(#cond) {
- #DOLLAR_CRATE::panic!(##panic_args);
+ #dollar_crate::panic!(##panic_args);
}
}}
}
- [] => quote! {{}},
+ [] => quote! {span =>{}},
};
ExpandResult::ok(expanded)
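
The expansion wraps the condition in `if !(..)` and forwards the remaining comma-separated arguments to `$crate::panic!`. A small declarative macro can mimic that shape (a sketch only; the real expander splices token trees and stamps them with the call-site span):

```rust
macro_rules! my_assert {
    ($cond:expr $(, $arg:tt)*) => {{
        if !($cond) {
            // Stand-in for `$crate::panic!(##panic_args)`.
            panic!($($arg),*);
        }
    }};
}

fn main() {
    my_assert!(1 + 1 == 2, "math still works");
    my_assert!(true);
}
```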
@@ -211,12 +233,13 @@ fn file_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
- let expanded = quote! {
+ let expanded = quote! {span =>
#file_name
};
@@ -227,16 +250,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- format_args_expand_general(db, id, tt, "")
+ format_args_expand_general(db, id, tt, "", span)
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- format_args_expand_general(db, id, tt, "\\n")
+ format_args_expand_general(db, id, tt, "\\n", span)
}
fn format_args_expand_general(
@@ -245,11 +270,12 @@ fn format_args_expand_general(
tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- let pound = quote! {@PUNCT '#'};
+ let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
- return ExpandResult::ok(quote! {
+ return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
}
@@ -258,25 +284,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
-
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
- let krate = DOLLAR_CRATE.clone();
- literals.push(quote!(#krate::format_args!(#lit);));
+ let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
+ literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
- let pound = quote! {@PUNCT '#'};
- let expanded = quote! {
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
@@ -288,20 +314,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
- let expanded = if enabled { quote!(true) } else { quote!(false) };
+ let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded)
}
@@ -309,13 +337,15 @@ fn panic_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
- quote!(#DOLLAR_CRATE::panic::panic_2021!)
+ quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
- quote!(#DOLLAR_CRATE::panic::panic_2015!)
+ quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments
@@ -327,13 +357,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
- quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
+ quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else {
- quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
+ quote!(span =>#dollar_crate::panic::unreachable_2015!)
};
// Pass the original arguments
@@ -363,6 +395,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@@ -372,13 +405,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"),
};
- ExpandResult { value: quote! {}, err: Some(err) }
+ ExpandResult { value: quote! {span =>}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
@@ -418,13 +452,14 @@ fn concat_expand(
}
}
}
- ExpandResult { value: quote!(#text), err }
+ ExpandResult { value: quote!(span =>#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
@@ -457,8 +492,25 @@ fn concat_bytes_expand(
}
}
}
- let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: quote!([#ident]), err }
+ let value = tt::Subtree {
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+ token_trees: {
+ Itertools::intersperse_with(
+ bytes.into_iter().map(|it| {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
+ }),
+ || {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ span,
+ }))
+ },
+ )
+ .collect()
+ },
+ };
+ ExpandResult { value, err }
}
fn concat_bytes_expand_subtree(
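// `Itertools::intersperse_with` in UFCS form (as above; the fully qualified
// call sidesteps a name clash with the unstable `Iterator::intersperse_with`):
// it yields a fresh separator between every pair of items. Plain illustration:
use itertools::Itertools;

fn join_bytes(bytes: &[u8]) -> String {
    Itertools::intersperse_with(bytes.iter().map(|b| b.to_string()), || ", ".to_owned())
        .collect()
}
// join_bytes(&[98, 99, 100]) == "98, 99, 100"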
@@ -491,6 +543,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
@@ -505,8 +558,9 @@ fn concat_idents_expand(
}
}
}
- let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: quote!(#ident), err }
+ // FIXME: merge the spans of the concatenated identifiers
+ let ident = tt::Ident { text: ident.into(), span };
+ ExpandResult { value: quote!(span =>#ident), err }
}
fn relative_file(
@@ -541,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
- _tt: &tt::Subtree,
+ tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- match db.include_expand(arg_id) {
- Ok((res, _)) => ExpandResult::ok(res.0.clone()),
- Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
+ let file_id = match include_input_to_file_id(db, arg_id, tt) {
+ Ok(it) => it,
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
+ };
+ match parse_to_token_tree(
+ SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ SyntaxContextId::ROOT,
+ &db.file_text(file_id),
+ ) {
+ Some(it) => ExpandResult::ok(it),
+ None => ExpandResult::new(
+ tt::Subtree::empty(DelimSpan { open: span, close: span }),
+ ExpandError::other("failed to parse included file"),
+ ),
}
}
-pub(crate) fn include_arg_to_tt(
+pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
-) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
- let loc = db.lookup_intern_macro_call(arg_id);
- let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
- panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
- };
- let path = parse_string(&arg.0)?;
- let file_id = relative_file(db, *arg_id, &path, false)?;
-
- let (subtree, map) =
- parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
- Ok((triomphe::Arc::new((subtree, map)), file_id))
+ arg: &tt::Subtree,
+) -> Result<FileId, ExpandError> {
+ relative_file(db, arg_id, &parse_string(arg)?, false)
}
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
- tt: &tt::Subtree,
+ _tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- if let Err(e) = parse_string(tt) {
- return ExpandResult::new(tt::Subtree::empty(), e);
- }
-
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
- span: tt::TokenId::unspecified(),
+ span,
}))],
};
ExpandResult::ok(res)
@@ -589,10 +646,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
// FIXME: we're not able to read excluded files (which is most of them because
@@ -602,14 +662,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
- return ExpandResult::ok(quote!(""));
+ return ExpandResult::ok(quote!(span =>""));
}
};
let text = db.file_text(file_id);
let text = &*text;
- ExpandResult::ok(quote!(#text))
+ ExpandResult::ok(quote!(span =>#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@@ -621,10 +681,13 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
let mut err = None;
@@ -641,7 +704,7 @@ fn env_expand(
// `include!("foo.rs")`, which might go into an infinite loop
"UNRESOLVED_ENV_VAR".to_string()
});
- let expanded = quote! { #s };
+ let expanded = quote! {span => #s };
ExpandResult { value: expanded, err }
}
@@ -650,15 +713,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
- None => quote! { ::core::option::Option::None::<&str> },
- Some(s) => quote! { ::core::option::Option::Some(#s) },
+ None => quote! {span => ::core::option::Option::None::<&str> },
+ Some(s) => quote! {span => ::core::option::Option::Some(#s) },
};
ExpandResult::ok(expanded)
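// A pattern repeated across these expanders whenever parsing the argument
// fails: return an empty subtree whose invisible delimiter still carries the
// call-site span, so the error stays attached to the macro call. A helper one
// could factor out (hypothetical, named here for illustration):
fn empty_at(span: SpanData) -> tt::Subtree {
    tt::Subtree::empty(DelimSpan { open: span, close: span })
}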
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 5292a5fa1..935669d49 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.
-use base_db::{salsa, CrateId, Edition, SourceDatabase};
+use base_db::{
+ salsa::{self, debug::DebugQueryTable},
+ span::SyntaxContextId,
+ CrateId, Edition, FileId, SourceDatabase,
+};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
- ast::{self, HasAttrs, HasDocComments},
- AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+ ast::{self, HasAttrs},
+ AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
- ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
- builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
- BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
- ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
- MacroDefKind, MacroFile, ProcMacroExpander,
+ ast_id_map::AstIdMap,
+ attrs::{collect_attrs, RawAttrs},
+ builtin_attr_macro::pseudo_derive_attr_expansion,
+ builtin_fn_macro::EagerExpander,
+ fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
+ hygiene::{apply_mark, SyntaxContextData, Transparency},
+ span::{RealSpanMap, SpanMap, SpanMapRef},
+ tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
+ ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
- pub mac: mbe::DeclarativeMacro,
- pub def_site_token_map: mbe::TokenMap,
+ pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
+ pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
- pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ pub fn expand(
+ &self,
+ db: &dyn ExpandDatabase,
+ tt: tt::Subtree,
+ call_id: MacroCallId,
+ ) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
- None => self.mac.expand(tt).map_err(Into::into),
+ None => self
+ .mac
+ .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
+ .map_err(Into::into),
}
}
- pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
- self.mac.map_id_down(token_id)
- }
-
- pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- self.mac.map_id_up(token_id)
+ pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ match self.mac.err() {
+ Some(e) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan::DUMMY),
+ ExpandError::other(format!("invalid macro definition: {e}")),
+ ),
+ None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
+ }
}
}
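// Hygiene now travels on spans: the callback handed to
// `mbe::DeclarativeMacro::expand` stamps every output span's syntax context
// with one mark (the call id plus the macro's transparency), while
// `expand_unhygienic` passes a no-op instead. Shape of the marking callback,
// assuming it receives `&mut base_db::span::SpanData` as in the call above:
fn marking_callback<'a>(
    db: &'a dyn ExpandDatabase,
    call_id: MacroCallId,
    transparency: Transparency,
) -> impl FnMut(&mut base_db::span::SpanData) + 'a {
    move |s| s.ctx = apply_mark(db, s.ctx, call_id, transparency)
}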
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
+ /// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
-// FIXME: Get rid of these methods
-impl TokenExpander {
- pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- match self {
- TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
- TokenExpander::BuiltIn(..)
- | TokenExpander::BuiltInEager(..)
- | TokenExpander::BuiltInAttr(..)
- | TokenExpander::BuiltInDerive(..)
- | TokenExpander::ProcMacro(..) => id,
- }
- }
-
- pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- match self {
- TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
- TokenExpander::BuiltIn(..)
- | TokenExpander::BuiltInEager(..)
- | TokenExpander::BuiltInAttr(..)
- | TokenExpander::BuiltInDerive(..)
- | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
- }
- }
-}
-
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached
fn parse_macro_expansion(
&self,
- macro_file: MacroFile,
- ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
+ macro_file: MacroFileId,
+ ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
+ #[salsa::transparent]
+ fn span_map(&self, file_id: HirFileId) -> SpanMap;
+
+ fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+ #[salsa::interned]
+ fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
- /// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
- fn macro_arg(
- &self,
- id: MacroCallId,
- ) -> ValueResult<
- Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
- Arc<Box<[SyntaxError]>>,
- >;
- /// Extracts syntax node, corresponding to a macro call. That's a firewall
+ fn setup_syntax_context_root(&self) -> ();
+ #[salsa::transparent]
+ fn dump_syntax_contexts(&self) -> String;
+
+ /// Lowers a syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
- fn macro_arg_node(
+ fn macro_arg(
&self,
id: MacroCallId,
- ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+ ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
-
- /// Expand macro call to a token tree.
- // This query is LRU cached
- fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
- #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
- fn include_expand(
- &self,
- arg_id: MacroCallId,
- ) -> Result<
- (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
- ExpandError,
- >;
/// Special case of the previous query for procedural macros. We can't LRU-cache
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
+}
- fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
+#[inline]
+pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+ HirFileIdRepr::MacroFile(m) => {
+ SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
+ }
+ }
+}
+
+pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
+ Arc::new(RealSpanMap::from_file(db, file_id))
}
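// `span_map` is the single entry point for span lookups and is marked
// #[salsa::transparent], i.e. a plain dispatch rather than a memoized query:
// real files build a RealSpanMap, macro files reuse the map produced by
// `parse_macro_expansion`. A lookup then looks like this (sketch, assuming a
// `TextRange` in hand):
fn span_at(db: &dyn ExpandDatabase, file_id: HirFileId, range: TextRange) -> SpanData {
    db.span_map(file_id).as_ref().span_for_range(range)
}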
/// This expands the given macro call, but with different arguments. This is
@@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
- let token_range = token_to_map.text_range();
+
+ let span_map = RealSpanMap::absolute(FileId::BOGUS);
+ let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
- let censor = censor_for_macro_input(&loc, speculative_args);
- let mut fixups = fixup::fixup_syntax(speculative_args);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- speculative_args,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
+ let (mut tt, undo_info) = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ let censor = censor_for_macro_input(&loc, speculative_args);
+ let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+ fixups.append.retain(|it, _| match it {
+ syntax::NodeOrToken::Node(it) => !censor.contains(it),
+ syntax::NodeOrToken::Token(_) => true,
+ });
+ fixups.remove.extend(censor);
+ (
+ mbe::syntax_node_to_token_tree_modified(
+ speculative_args,
+ span_map,
+ fixups.append,
+ fixups.remove,
+ ),
+ fixups.undo_info,
+ )
+ }
+ };
- let (attr_arg, token_id) = match loc.kind {
+ let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@@ -204,65 +216,51 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
- item.doc_comments_and_attrs()
+ collect_attrs(&item)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
}?;
match attr.token_tree() {
Some(token_tree) => {
- let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
- tree.delimiter = tt::Delimiter::unspecified();
-
- let shift = mbe::Shift::new(&tt);
- shift.shift_all(&mut tree);
-
- let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
- let attr_input_start =
- token_tree.left_delimiter_token()?.text_range().start();
- let range = token_range.checked_sub(attr_input_start)?;
- let token_id = shift.shift(map.token_by_range(range)?);
- Some(token_id)
- } else {
- None
- };
- (Some(tree), token_id)
- }
- _ => (None, None),
- }
- }
- _ => (None, None),
- };
- let token_id = match token_id {
- Some(token_id) => token_id,
- // token wasn't inside an attribute input so it has to be in the general macro input
- None => {
- let range = token_range.checked_sub(speculative_args.text_range().start())?;
- let token_id = spec_args_tmap.token_by_range(range)?;
- match loc.def.kind {
- MacroDefKind::Declarative(it) => {
- db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+ let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
+ tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+
+ Some(tree)
}
- _ => token_id,
+ _ => None,
}
}
+ _ => None,
};
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
- tt.delimiter = tt::Delimiter::unspecified();
- expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ let call_site = loc.span(db);
+ expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &tt,
+ attr_arg.as_ref(),
+ call_site,
+ call_site,
+ call_site,
+ )
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
- pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
+ pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
}
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
- expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+ expander.expand(db, actual_macro_call, &adt, span_map)
+ }
+ MacroDefKind::Declarative(it) => {
+ db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
- MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
- let expand_to = macro_expand_to(db, actual_macro_call);
- fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
+ let expand_to = loc.expand_to();
+
+ fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
- .ranges_by_token(token_id, token_to_map.kind())
+ .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
+ HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
}
}
+// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
+// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
- macro_file: MacroFile,
-) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+ macro_file: MacroFileId,
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion");
- let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
-
- let expand_to = macro_expand_to(db, macro_file.macro_call_id);
-
- tracing::debug!("expanded = {}", tt.as_debug_string());
- tracing::debug!("kind = {:?}", expand_to);
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let expand_to = loc.expand_to();
+ let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@@ -333,51 +331,138 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
- db.parse_macro_expansion(MacroFile { macro_call_id })
+ db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
+fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
+ }
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let (parse, map) = db.parse_macro_expansion(macro_file).value;
+ (parse, SpanMap::ExpansionSpanMap(map))
+ }
+ }
+}
+
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
-) -> ValueResult<
- Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
- Arc<Box<[SyntaxError]>>,
-> {
- let loc = db.lookup_intern_macro_call(id);
-
- if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
- return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
- }
-
- let ValueResult { value, err } = db.macro_arg_node(id);
- let Some(arg) = value else {
- return ValueResult { value: None, err };
+ // FIXME: consider the following return type instead, by putting the fixup info into the eager call info args
+ // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
+) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
+ let mismatched_delimiters = |arg: &SyntaxNode| {
+ let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
+ let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
+ let well_formed_tt =
+ matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
+ if !well_formed_tt {
+ // Don't expand malformed (unbalanced) macro invocations. This is
+ // less than ideal, but trying to expand unbalanced macro calls
+ // sometimes produces pathological, deeply nested code which breaks
+ // all kinds of things.
+ //
+ // Some day, we'll have explicit recursion counters for all
+ // recursive things, at which point this code might be removed.
+ cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+ Some(Arc::new(Box::new([SyntaxError::new(
+ "unbalanced token tree".to_owned(),
+ arg.text_range(),
+ )]) as Box<[_]>))
+ } else {
+ None
+ }
};
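// The shape check above in isolation: a macro argument counts as a well-formed
// token tree only when its first and last tokens are a matching delimiter pair.
fn well_formed(first: char, last: char) -> bool {
    matches!((first, last), ('(', ')') | ('[', ']') | ('{', '}'))
}
// well_formed('(', ')') == true; well_formed('(', ']') == false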
+ let loc = db.lookup_intern_macro_call(id);
+ if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
+ .then(|| loc.eager.as_deref())
+ .flatten()
+ {
+ ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
+ } else {
+ let (parse, map) = parse_with_map(db, loc.kind.file_id());
+ let root = parse.syntax_node();
+
+ let syntax = match loc.kind {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = &ast_id.to_ptr(db).to_node(&root);
+ let offset = node.syntax().text_range().start();
+ match node.token_tree() {
+ Some(tt) => {
+ let tt = tt.syntax();
+ if let Some(e) = mismatched_delimiters(tt) {
+ return ValueResult::only_err(e);
+ }
+ tt.clone()
+ }
+ None => {
+ return ValueResult::only_err(Arc::new(Box::new([
+ SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
+ ])));
+ }
+ }
+ }
+ MacroCallKind::Derive { ast_id, .. } => {
+ ast_id.to_ptr(db).to_node(&root).syntax().clone()
+ }
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
+ };
+ let (mut tt, undo_info) = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ let censor = censor_for_macro_input(&loc, &syntax);
+ let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+ fixups.append.retain(|it, _| match it {
+ syntax::NodeOrToken::Node(it) => !censor.contains(it),
+ syntax::NodeOrToken::Token(_) => true,
+ });
+ fixups.remove.extend(censor);
+ {
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map.as_ref(),
+ fixups.append.clone(),
+ fixups.remove.clone(),
+ );
+ reverse_fixups(&mut tt, &fixups.undo_info);
+ }
+ (
+ mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map,
+ fixups.append,
+ fixups.remove,
+ ),
+ fixups.undo_info,
+ )
+ }
+ };
- let node = SyntaxNode::new_root(arg);
- let censor = censor_for_macro_input(&loc, &node);
- let mut fixups = fixup::fixup_syntax(&node);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- &node,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
+ if loc.def.is_proc_macro() {
+ // proc macros expect their inputs without parentheses; MBEs expect them with the parentheses included
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ }
- if loc.def.is_proc_macro() {
- // proc macros expect their inputs without parentheses, MBEs expect it with them included
- tt.delimiter = tt::Delimiter::unspecified();
- }
- let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
- match err {
- Some(err) => ValueResult::new(val, err),
- None => ValueResult::ok(val),
+ if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
+ match parse.errors() {
+ [] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
+ errors => ValueResult::new(
+ Some((Arc::new(tt), undo_info)),
+ // Box::<[_]>::from(errors), not stable yet
+ Arc::new(errors.to_vec().into_boxed_slice()),
+ ),
+ }
+ } else {
+ ValueResult::ok(Some((Arc::new(tt), undo_info)))
+ }
}
}
+// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
@@ -403,10 +488,9 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
- ast::Item::cast(node.clone())?
- .doc_comments_and_attrs()
+ collect_attrs(&ast::Item::cast(node.clone())?)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.map(|attr| attr.syntax().clone())
.into_iter()
.collect()
@@ -417,103 +501,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
-fn macro_arg_node(
- db: &dyn ExpandDatabase,
- id: MacroCallId,
-) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
- let err = || -> Arc<Box<[_]>> {
- Arc::new(Box::new([SyntaxError::new_at_offset(
- "invalid macro call".to_owned(),
- syntax::TextSize::from(0),
- )]))
- };
- let loc = db.lookup_intern_macro_call(id);
- let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
- let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
- Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
- } else {
- loc.kind
- .arg(db)
- .and_then(|arg| ast::TokenTree::cast(arg.value))
- .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
- };
- match res {
- Some(res) if res.errors().is_empty() => res.syntax_node(),
- Some(res) => {
- return ValueResult::new(
- Some(res.syntax_node().green().into()),
- // Box::<[_]>::from(res.errors()), not stable yet
- Arc::new(res.errors().to_vec().into_boxed_slice()),
- );
- }
- None => return ValueResult::only_err(err()),
- }
- } else {
- match loc.kind.arg(db) {
- Some(res) => res.value,
- None => return ValueResult::only_err(err()),
- }
- };
- if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
- let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
- let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
- let well_formed_tt =
- matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
- if !well_formed_tt {
- // Don't expand malformed (unbalanced) macro invocations. This is
- // less than ideal, but trying to expand unbalanced macro calls
- // sometimes produces pathological, deeply nested code which breaks
- // all kinds of things.
- //
- // Some day, we'll have explicit recursion counters for all
- // recursive things, at which point this code might be removed.
- cov_mark::hit!(issue9358_bad_macro_stack_overflow);
- return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
- "unbalanced token tree".to_owned(),
- arg.text_range(),
- )])));
- }
- }
- ValueResult::ok(Some(arg.green().into()))
-}
-
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
- let (mac, def_site_token_map) = match id.to_node(db) {
- ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
- Some(arg) => {
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
- (mac, def_site_token_map)
- }
- None => (
- mbe::DeclarativeMacro::from_err(
+ let (root, map) = parse_with_map(db, id.file_id);
+ let root = root.syntax_node();
+
+ let transparency = |node| {
+ // ... would be nice to have the item tree here
+ let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+ match &*attrs
+ .iter()
+ .find(|it| {
+ it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
+ })?
+ .token_tree_value()?
+ .token_trees
+ {
+ [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
+ "transparent" => Some(Transparency::Transparent),
+ "semitransparent" => Some(Transparency::SemiTransparent),
+ "opaque" => Some(Transparency::Opaque),
+ _ => None,
+ },
+ _ => None,
+ }
+ };
+
+ let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
+ ast::Macro::MacroRules(macro_rules) => (
+ match macro_rules.token_tree() {
+ Some(arg) => {
+ let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+ mac
+ }
+ None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
- Default::default(),
- ),
- },
- ast::Macro::MacroDef(macro_def) => match macro_def.body() {
- Some(arg) => {
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
- (mac, def_site_token_map)
- }
- None => (
- mbe::DeclarativeMacro::from_err(
+ },
+ transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+ ),
+ ast::Macro::MacroDef(macro_def) => (
+ match macro_def.body() {
+ Some(arg) => {
+ let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+ mac
+ }
+ None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
- Default::default(),
- ),
- },
+ },
+ transparency(&macro_def).unwrap_or(Transparency::Opaque),
+ ),
};
- Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+ Arc::new(DeclarativeMacroExpander { mac, transparency })
}
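// How `rustc_macro_transparency` resolves above, defaults included:
// `macro_rules!` falls back to SemiTransparent, macros 2.0 to Opaque.
// Self-contained model of that decision (local enum, not the crate's type):
#[derive(Debug, PartialEq)]
enum Transparency { Transparent, SemiTransparent, Opaque }

fn transparency_of(attr_value: Option<&str>, is_macro_rules: bool) -> Transparency {
    attr_value
        .and_then(|v| match v {
            "transparent" => Some(Transparency::Transparent),
            "semitransparent" => Some(Transparency::SemiTransparent),
            "opaque" => Some(Transparency::Opaque),
            _ => None,
        })
        .unwrap_or(if is_macro_rules {
            Transparency::SemiTransparent
        } else {
            Transparency::Opaque
        })
}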
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@@ -529,39 +577,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
-fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
+fn macro_expand(
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+ loc: MacroCallLoc,
+) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
- let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
- MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+ MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
- let arg = db.macro_arg_node(id).value.unwrap();
-
- let node = SyntaxNode::new_root(arg);
- let censor = censor_for_macro_input(&loc, &node);
- let mut fixups = fixup::fixup_syntax(&node);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
- &node,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
-
- // this cast is a bit sus, can we avoid losing the typedness here?
- let adt = ast::Adt::cast(node).unwrap();
- let mut res = expander.expand(db, id, &adt, &tmap);
- fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
- res
+ let (root, map) = parse_with_map(db, loc.kind.file_id());
+ let root = root.syntax_node();
+ let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
+ let node = ast_id.to_ptr(db).to_node(&root);
+
+ // FIXME: Use censoring
+ let _censor = censor_for_macro_input(&loc, node.syntax());
+ expander.expand(db, macro_call_id, &node, map.as_ref())
}
_ => {
- let ValueResult { value, err } = db.macro_arg(id);
- let Some(macro_arg) = value else {
+ let ValueResult { value, err } = db.macro_arg(macro_call_id);
+ let Some((macro_arg, undo_info)) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -570,12 +610,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
- let (arg, arg_tm, undo_info) = &*macro_arg;
- let mut res = match loc.def.kind {
+ let arg = &*macro_arg;
+ match loc.def.kind {
MacroDefKind::Declarative(id) => {
- db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+ db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
+ }
+ MacroDefKind::BuiltIn(it, _) => {
+ it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
- MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through
@@ -583,11 +625,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
- let mut arg = arg.clone();
- fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
-
return ExpandResult {
- value: Arc::new(arg),
+ value: macro_arg.clone(),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@@ -600,12 +639,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
- MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
- MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+ MacroDefKind::BuiltInEager(it, _) => {
+ it.expand(db, macro_call_id, &arg).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInAttr(it, _) => {
+ let mut res = it.expand(db, macro_call_id, &arg);
+ fixup::reverse_fixups(&mut res.value, &undo_info);
+ res
+ }
_ => unreachable!(),
- };
- fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
- res
+ }
}
};
@@ -614,9 +657,12 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
err = error.clone().or(err);
}
- // Set a hard limit for the expanded tt
- if let Err(value) = check_tt_count(&tt) {
- return value;
+ // Skip checking token tree limit for include! macro call
+ if !loc.def.is_include() {
+ // Set a hard limit for the expanded tt
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
+ }
}
ExpandResult { value: Arc::new(tt), err }
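// The expansion size guard: everything except include! is capped at
// TOKEN_LIMIT (1_048_576 tokens); include! is exempted above, presumably
// because a large included file is legitimate input rather than a runaway
// macro. The check in miniature:
fn within_limit(count: usize, limit: usize) -> Result<(), String> {
    if count > limit {
        Err(format!("macro invocation exceeds token limit: produced {count} tokens"))
    } else {
        Ok(())
    }
}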
@@ -624,10 +670,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
- let Some(macro_arg) = db.macro_arg(id).value else {
+ let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -636,47 +682,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
- let (arg_tt, arg_tm, undo_info) = &*macro_arg;
-
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => {
- let mut attr_args = attr_args.0.clone();
- mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
- Some(attr_args)
- }
+ MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
_ => None,
};
- let ExpandResult { value: mut tt, err } =
- expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+ let call_site = loc.span(db);
+ let ExpandResult { value: mut tt, err } = expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &macro_arg,
+ attr_arg,
+ // FIXME
+ call_site,
+ call_site,
+ // FIXME
+ call_site,
+ );
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
- fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
+ fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err }
}
-fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
- Arc::new(HygieneFrame::new(db, file_id))
-}
-
-fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
- db.lookup_intern_macro_call(id).expand_to()
-}
-
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
-) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@@ -692,7 +735,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![],
}),
err: Some(ExpandError::other(format!(
@@ -705,3 +748,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
+
+fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
+ db.intern_syntax_context(SyntaxContextData::root());
+}
+
+fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
+ let mut s = String::from("Expansions:");
+ let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
+ entries.sort_by_key(|e| e.key);
+ for e in entries {
+ let id = e.key;
+ let expn_data = e.value.as_ref().unwrap();
+ s.push_str(&format!(
+ "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+ id,
+ expn_data.kind.file_id(),
+ expn_data.call_site,
+ SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+ expn_data.kind.descr(),
+ ));
+ }
+
+ s.push_str("\n\nSyntaxContexts:\n");
+ let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
+ entries.sort_by_key(|e| e.key);
+ for e in entries {
+ struct SyntaxContextDebug<'a>(
+ &'a dyn ExpandDatabase,
+ SyntaxContextId,
+ &'a SyntaxContextData,
+ );
+
+ impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.2.fancy_debug(self.1, self.0, f)
+ }
+ }
+ stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+ }
+ s
+}
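// The debug-wrapper trick used by `SyntaxContextDebug` above, reduced to its
// essentials: `fmt::Debug` has a fixed signature, so a newtype smuggles in
// the extra context (there, the database and the id) needed for rich output.
use std::fmt;

struct WithBase<'a>(u32, &'a str);

impl fmt::Debug for WithBase<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{} ({})", self.0, self.1)
    }
}
// format!("{:?}", WithBase(7, "ctx")) == "7 (ctx)"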
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index 4110f2847..8d55240ae 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -18,18 +18,17 @@
//!
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
-use base_db::CrateId;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
+use base_db::{span::SyntaxContextId, CrateId};
+use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
- hygiene::Hygiene,
mod_path::ModPath,
- EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind,
+ span::SpanMapRef,
+ EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
+ call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
@@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
+ call_site,
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
- // we need this map here as the expansion of the eager input fake file loses whitespace ...
- let mut ws_mapping = FxHashMap::default();
- if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
- ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
- Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
- }));
- }
+
+ let mut arg_map = ExpansionSpanMap::empty();
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
- &Hygiene::new(db, macro_call.file_id),
+ &arg_exp_map,
+ &mut arg_map,
+ TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
+ call_site,
resolver,
)
};
let err = parse_err.or(err);
+ if cfg!(debug_assertions) {
+ arg_map.finish();
+ }
- let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+ let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
- let (mut subtree, expanded_eager_input_token_map) =
- mbe::syntax_node_to_token_tree(&expanded_eager_input);
+ let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
- let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
- let mut ids_used = FxHashSet::default();
- let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
- // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
- // so we need to remap them to the original input of the eager macro.
- subtree.visit_ids(&mut |id| {
- // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
-
- if let Some(range) = expanded_eager_input_token_map
- .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
- {
- // remap from expanded eager input to eager input expansion
- if let Some(og_range) = mapping.get(&range) {
- // remap from eager input expansion to original eager input
- if let Some(&og_range) = ws_mapping.get(og_range) {
- if let Some(og_token) = og_tmap.token_by_range(og_range) {
- ids_used.insert(og_token);
- return og_token;
- }
- }
- }
- }
- tt::TokenId::UNSPECIFIED
- });
- og_tmap.filter(|id| ids_used.contains(&id));
- og_tmap
- } else {
- Default::default()
- };
- subtree.delimiter = crate::tt::Delimiter::unspecified();
+ subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let loc = MacroCallLoc {
def,
krate,
- eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((subtree, og_tmap)),
- arg_id,
- error: err.clone(),
- })),
+ eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+ call_site,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
-) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
+ call_site: SyntaxContextId,
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
- let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
+ let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@@ -146,57 +116,59 @@ fn lazy_expand(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
- hygiene: &Hygiene,
+ span_map: &ExpansionSpanMap,
+ expanded_map: &mut ExpansionSpanMap,
+ mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
+ call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
+) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
- let mut mapping = FxHashMap::default();
let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions
let mut error = None;
- let mut offset = 0i32;
- let apply_offset = |it: TextSize, offset: i32| {
- TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
- };
let mut children = original.preorder_with_tokens();
// Collect replacement
while let Some(child) = children.next() {
- let WalkEvent::Enter(child) = child else { continue };
let call = match child {
- syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+ WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => {
children.skip_subtree();
it
}
- None => continue,
+ _ => continue,
},
- syntax::NodeOrToken::Token(t) => {
- mapping.insert(
- TextRange::new(
- apply_offset(t.text_range().start(), offset),
- apply_offset(t.text_range().end(), offset),
- ),
- t.text_range(),
- );
+ WalkEvent::Enter(_) => continue,
+ WalkEvent::Leave(child) => {
+ if let SyntaxElement::Token(t) = child {
+ let start = t.text_range().start();
+ offset += t.text_range().len();
+ expanded_map.push(offset, span_map.span_at(start));
+ }
continue;
}
};
- let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+
+ let def = match call
+ .path()
+ .and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
+ {
Some(path) => match macro_resolver(path.clone()) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+ offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
+ offset += call.syntax().text_range().len();
continue;
}
};
@@ -207,29 +179,22 @@ fn eager_macro_recur(
krate,
curr.with_value(call.clone()),
def,
+ call_site,
macro_resolver,
);
match value {
Some(call_id) => {
- let ExpandResult { value, err: err2 } =
+ let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
- if let Some(tt) = call.token_tree() {
- let call_tt_start = tt.syntax().text_range().start();
- let call_start =
- apply_offset(call.syntax().text_range().start(), offset);
- if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
- mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
- value
- .1
- .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
- .map(|r| (r + call_start, range + call_tt_start))
- }));
- }
- }
+ map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
+ let syntax_node = parse.syntax_node();
ExpandResult {
- value: Some(value.0.syntax_node().clone_for_update()),
+ value: Some((
+ syntax_node.clone_for_update(),
+ offset + syntax_node.text_range().len(),
+ )),
err: err.or(err2),
}
}
@@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
- lazy_expand(db, &def, curr.with_value(call.clone()), krate);
- let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
- Some(db.decl_macro_expander(def.krate, ast_id))
- } else {
- None
- };
+ lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
// replace macro inside
- let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
- &hygiene,
+ &tm,
+ expanded_map,
+ offset,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,
+ call_site,
macro_resolver,
);
let err = err.or(error);
- if let Some(tt) = call.token_tree() {
- let call_tt_start = tt.syntax().text_range().start();
- let call_start = apply_offset(call.syntax().text_range().start(), offset);
- if let Some((_tt, arg_map, _)) = parse
- .file_id
- .macro_file()
- .and_then(|id| db.macro_arg(id.macro_call_id).value)
- .as_deref()
- {
- mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
- tm.first_range_by_token(
- decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
- syntax::SyntaxKind::TOMBSTONE,
- )
- .map(|r| (r + call_start, range + call_tt_start))
- }));
- }
- }
- // FIXME: Do we need to re-use _m here?
- ExpandResult { value: value.map(|(n, _m)| n), err }
+ ExpandResult { value, err }
}
};
if err.is_some() {
@@ -288,16 +231,18 @@ fn eager_macro_recur(
}
// check if the whole original syntax is replaced
if call.syntax() == &original {
- return ExpandResult { value: value.zip(Some(mapping)), err: error };
+ return ExpandResult { value, err: error };
}
- if let Some(insert) = value {
- offset += u32::from(insert.text_range().len()) as i32
- - u32::from(call.syntax().text_range().len()) as i32;
- replacements.push((call, insert));
+ match value {
+ Some((insert, new_offset)) => {
+ replacements.push((call, insert));
+ offset = new_offset;
+ }
+ None => offset += call.syntax().text_range().len(),
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
- ExpandResult { value: Some((original, mapping)), err: error }
+ ExpandResult { value: Some((original, offset)), err: error }
}
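// Span bookkeeping in the rewritten `eager_macro_recur`: instead of the old
// TextRange-to-TextRange hashmap, a running `offset` walks the stitched
// output and, at each token boundary, the input token's span is pushed into
// `expanded_map`. The accumulation, modelled over plain (len, span) pairs:
fn build_offsets(tokens: &[(u32, u32)]) -> Vec<(u32, u32)> {
    let mut offset = 0u32;
    let mut map = Vec::new();
    for &(len, span) in tokens {
        offset += len;
        map.push((offset, span)); // end offset of the token -> its span
    }
    map
}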
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
new file mode 100644
index 000000000..89f0685d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -0,0 +1,375 @@
+//! Wrapper types that pair values with the file id they come from.
+use std::iter;
+
+use base_db::{
+ span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
+ FileId, FileRange,
+};
+use either::Either;
+use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
+
+use crate::{db, ExpansionInfo, MacroFileIdExt};
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFileWrapper<FileKind, T> {
+ pub file_id: FileKind,
+ pub value: T,
+}
+pub type InFile<T> = InFileWrapper<HirFileId, T>;
+pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
+pub type InRealFile<T> = InFileWrapper<FileId, T>;
+
+impl<FileKind, T> InFileWrapper<FileKind, T> {
+ pub fn new(file_id: FileKind, value: T) -> Self {
+ Self { file_id, value }
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
+ InFileWrapper::new(self.file_id, f(self.value))
+ }
+}
+
+impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
+ pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
+ InFileWrapper::new(self.file_id, value)
+ }
+
+ pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
+ self.with_value(&self.value)
+ }
+}
+
+impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
+ pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+ fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
+ InFile { file_id: file_id.into(), value }
+ }
+}
+
+impl<T> From<InRealFile<T>> for InFile<T> {
+ fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
+ InFile { file_id: file_id.into(), value }
+ }
+}
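// Typical use of the aliases above: carry the file id alongside a node and
// project through it without re-threading the id by hand (sketch):
fn range_of(node: InFile<SyntaxNode>) -> InFile<TextRange> {
    node.map(|it| it.text_range())
}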
+
+// region:transpose impls
+
+impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
+ pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
+ Some(InFileWrapper::new(self.file_id, self.value?))
+ }
+}
+
+impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
+ pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
+ match self.value {
+ Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
+ Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
+ }
+ }
+}
+
+// endregion:transpose impls
+
+trait FileIdToSyntax: Copy {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
+}
+
+impl FileIdToSyntax for FileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse(self).syntax_node()
+ }
+}
+impl FileIdToSyntax for MacroFileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse_macro_expansion(self).value.0.syntax_node()
+ }
+}
+impl FileIdToSyntax for HirFileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse_or_expand(self)
+ }
+}
+
+#[allow(private_bounds)]
+impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
+ pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ FileIdToSyntax::file_syntax(self.file_id, db)
+ }
+}
+
+impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
+ pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
+// region:specific impls
+
+impl InFile<&SyntaxNode> {
+    /// Skips the attributed item that caused the macro invocation we are climbing up.
+ pub fn ancestors_with_macros_skip_attr_item(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let macro_file_id = node.file_id.macro_file()?;
+ let parent_node = macro_file_id.call_node(db);
+ if macro_file_id.is_attr_macro(db) {
+ // macro call was an attributed item, skip it
+ // FIXME: does this fail if this is a direct expansion of another macro?
+ parent_node.map(|node| node.parent()).transpose()
+ } else {
+ Some(parent_node)
+ }
+ }
+ };
+ iter::successors(succ(&self.cloned()), succ)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ ///
+ /// For attributes and derives, this will point back to the attribute only.
+ /// For the entire item use [`InFile::original_file_range_full`].
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some((res, ctxt)) =
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ {
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return res;
+ }
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some((res, ctxt)) =
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ {
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return res;
+ }
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range_with_body(db)
+ }
+ }
+ }
+
+    /// Attempts to map the syntax node back up through its macro calls.
+ pub fn original_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ }
+ }
+ }
+
+ pub fn original_syntax_node(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<InRealFile<SyntaxNode>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ let file_id = match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ return Some(InRealFile { file_id, value: self.value.clone() })
+ }
+ HirFileIdRepr::MacroFile(m) => m,
+ };
+ if !file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ let (FileRange { file_id, range }, ctx) =
+ ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if !ctx.is_root() {
+ return None;
+ }
+
+ let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let kind = self.value.kind();
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
+ let value = anc.ancestors().find(|it| it.kind() == kind)?;
+ Some(InRealFile::new(file_id, value))
+ }
+}
+
+impl InMacroFile<SyntaxToken> {
+ pub fn upmap_once(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
+ self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
+ }
+}
+
+impl InFile<SyntaxToken> {
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let (range, ctxt) = ExpansionInfo::new(db, mac_file)
+ .span_for_offset(db, self.value.text_range().start());
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return range;
+ }
+
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+    /// Attempts to map the syntax node back up through its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some(FileRange { file_id, range: self.value.text_range() })
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let (range, ctxt) = ExpansionInfo::new(db, mac_file)
+ .span_for_offset(db, self.value.text_range().start());
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ Some(range)
+ } else {
+ None
+ }
+ }
+ }
+ }
+}
+
+impl InMacroFile<TextSize> {
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
+ ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
+ }
+}
+
+impl InFile<TextRange> {
+ pub fn original_node_file_range(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> (FileRange, SyntaxContextId) {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ Some(it) => it,
+ None => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ (loc.kind.original_call_range(db), SyntaxContextId::ROOT)
+ }
+ }
+ }
+ }
+ }
+
+ pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ Some((it, SyntaxContextId::ROOT)) => it,
+ _ => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+ }
+ }
+
+ pub fn original_node_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
+ }
+ }
+ }
+}
+
+impl<N: AstNode> InFile<N> {
+ pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ let file_id = match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ return Some(InRealFile { file_id, value: self.value })
+ }
+ HirFileIdRepr::MacroFile(m) => m,
+ };
+ if !file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
+ .map_node_range_up(db, self.value.syntax().text_range())?;
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if !ctx.is_root() {
+ return None;
+ }
+
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
+ let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let value = anc.ancestors().find_map(N::cast)?;
+ Some(InRealFile::new(file_id, value))
+ }
+}
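+
+// --- Illustrative sketch (exposition only; not part of this change). ---
+// Typical use of the upmapping API above: given an AST node that may live
+// inside a macro expansion, recover its range in the real file if possible.
+// `example_upmap` is a hypothetical helper.
+fn example_upmap<N: AstNode>(db: &dyn db::ExpandDatabase, node: InFile<N>) -> Option<FileRange> {
+    // Real files map to themselves; for macro files, only attribute
+    // expansions carry enough information to map a whole node back.
+    let original: InRealFile<N> = node.original_ast_node(db)?;
+    Some(FileRange { file_id: original.file_id, range: original.value.syntax().text_range() })
+}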
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index e6e8d8c02..346cd39a7 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -1,111 +1,126 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
-use std::mem;
-use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
-use rustc_hash::FxHashMap;
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData},
+ FileId,
+};
+use la_arena::RawIdx;
+use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
+use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
- match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+use triomphe::Arc;
+use tt::{Spacing, Span};
+
+use crate::{
+ span::SpanMapRef,
+ tt::{Ident, Leaf, Punct, Subtree},
};
-use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
- pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
+ pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
- pub(crate) token_map: TokenMap,
- pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
-#[derive(Debug, Default, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
- original: Box<[Subtree]>,
+ // FIXME: ThinArc<[Subtree]>
+ original: Option<Arc<Box<[Subtree]>>>,
+}
+
+impl SyntaxFixupUndoInfo {
+ pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
-const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+// censoring -> just don't convert the node
+// replacement -> censor + append
+// append -> insert a fake node; we assemble a dummy span here that we can
+// recognize and remove again later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
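+
+// --- Illustrative sketch (exposition only; not part of this change). ---
+// How the dummy constants above are read back out of a span. These helpers
+// are hypothetical; `reverse_fixups_` below performs the same checks inline.
+fn is_fixup_token(span: &SpanData) -> bool {
+    // Anything anchored to the dummy file was synthesized by the fixup pass.
+    span.anchor.file_id == FIXUP_DUMMY_FILE
+}
+fn replacement_index(span: &SpanData) -> Option<u32> {
+    // For replaced error nodes, the range start doubles as an index into the
+    // undo info; the `!0` end distinguishes them from plain appended tokens.
+    (span.range.end() == FIXUP_DUMMY_RANGE_END).then(|| u32::from(span.range.start()))
+}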
-pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
- let mut replace = FxHashMap::<SyntaxElement, _>::default();
+ let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
- let mut token_map = TokenMap::default();
- let mut next_id = 0;
+ let dummy_range = FIXUP_DUMMY_RANGE;
+    // We use a file id of `FileId(!0)` to signal a fake node. The text range's start offset serves
+    // as the index into the replacement vec, but only if the range's end points to !0.
+ let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
+ let fake_span = |range| SpanData {
+ range: dummy_range,
+ anchor: dummy_anchor,
+ ctx: span_map.span_for_range(range).ctx,
+ };
while let Some(event) = preorder.next() {
- let node = match event {
- syntax::WalkEvent::Enter(node) => node,
- syntax::WalkEvent::Leave(_) => continue,
- };
+ let syntax::WalkEvent::Enter(node) = event else { continue };
+ let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
+ remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
- let (original_tree, new_tmap, new_next_id) =
- mbe::syntax_node_to_token_tree_with_modifications(
- &node,
- mem::take(&mut token_map),
- next_id,
- Default::default(),
- Default::default(),
- );
- token_map = new_tmap;
- next_id = new_next_id;
+ let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
let idx = original.len() as u32;
original.push(original_tree);
- let replacement = SyntheticToken {
- kind: SyntaxKind::IDENT,
+ let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: node.text_range(),
- id: SyntheticTokenId(idx),
- };
- replace.insert(node.clone().into(), vec![replacement]);
+ span: SpanData {
+ range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
+ anchor: dummy_anchor,
+ ctx: span_map.span_for_range(node_range).ctx,
+ },
+ });
+ append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
+
// In some other situations, we can fix things by just appending some tokens.
- let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range),
+ }),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range),
+ }),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -117,28 +132,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                        // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -150,46 +162,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                    // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                    // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -201,29 +209,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                    // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -234,10 +239,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
- (SyntaxKind::UNDERSCORE, "_"),
- (SyntaxKind::IN_KW, "in"),
- (SyntaxKind::IDENT, "__ra_fixup")
- ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
+ "_",
+ "in",
+ "__ra_fixup"
+ ].map(|text|
+ Leaf::Ident(Ident {
+ text: text.into(),
+ span: fake_span(node_range)
+ }),
+ );
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@@ -248,18 +258,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                    // FIXME: This should be a subtree, no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -267,12 +276,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
+ let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups {
append,
- replace,
- token_map,
- next_id,
- undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
+ remove,
+ undo_info: SyntaxFixupUndoInfo {
+ original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
+ },
}
}
@@ -288,36 +298,57 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
-pub(crate) fn reverse_fixups(
- tt: &mut Subtree,
- token_map: &TokenMap,
- undo_info: &SyntaxFixupUndoInfo,
-) {
+pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
+ let Some(undo_info) = undo_info.original.as_deref() else { return };
+ let undo_info = &**undo_info;
+ if never!(
+ tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ ) {
+ tt.delimiter.close = SpanData::DUMMY;
+ tt.delimiter.open = SpanData::DUMMY;
+ }
+ reverse_fixups_(tt, undo_info);
+}
+
+fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
+ // delete all fake nodes
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
- token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
- }
- tt::TokenTree::Subtree(st) => {
- token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
+ let span = leaf.span();
+ let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+ let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+ is_real_leaf || is_replaced_node
}
+ tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
- reverse_fixups(&mut tt, token_map, undo_info);
+ if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ {
+ // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+ // might copy them if the proc-macro asks for it, so we need to filter those out
+ // here as well.
+ return SmallVec::new_const();
+ }
+ reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
- if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
- let original = undo_info.original[id.0 as usize].clone();
+ if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+                    // This is a fake node: replace it with the original it stands in for.
+ let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
} else {
SmallVec::from_const([original.into()])
}
} else {
+ // just a normal leaf
SmallVec::from_const([leaf.into()])
}
}
@@ -327,11 +358,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)]
mod tests {
+ use base_db::FileId;
use expect_test::{expect, Expect};
+ use triomphe::Arc;
- use crate::tt;
-
- use super::reverse_fixups;
+ use crate::{
+ fixup::reverse_fixups,
+ span::{RealSpanMap, SpanMap},
+ tt,
+ };
// The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`.
@@ -361,13 +396,13 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
- let fixups = super::fixup_syntax(&parsed.syntax_node());
- let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
+ let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
+ span_map.as_ref(),
fixups.append,
+ fixups.remove,
);
let actual = format!("{tt}\n");
@@ -383,14 +418,15 @@ mod tests {
parse.syntax_node()
);
- reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+ reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
- let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+ let original_as_tt =
+ mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!(
check_subtree_eq(&tt, &original_as_tt),
- "different token tree: {tt:?},\n{original_as_tt:?}"
+ "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
);
}
@@ -403,7 +439,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {for _ in __ra_fixup {}}
+fn foo () {for _ in __ra_fixup { }}
"#]],
)
}
@@ -431,7 +467,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {for bar in qux {}}
+fn foo () {for bar in qux { }}
"#]],
)
}
@@ -462,7 +498,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -494,7 +530,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -609,7 +645,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if a {}}
+fn foo () {if a { }}
"#]],
)
}
@@ -623,7 +659,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if __ra_fixup {}}
+fn foo () {if __ra_fixup { }}
"#]],
)
}
@@ -637,7 +673,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if __ra_fixup {} {}}
+fn foo () {if __ra_fixup {} { }}
"#]],
)
}
@@ -651,7 +687,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {while __ra_fixup {}}
+fn foo () {while __ra_fixup { }}
"#]],
)
}
@@ -665,7 +701,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {while foo {}}
+fn foo () {while foo { }}
"#]],
)
}
@@ -692,7 +728,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {loop {}}
+fn foo () {loop { }}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index ca65db113..7b03709ac 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -2,252 +2,247 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
-use base_db::CrateId;
-use db::TokenExpander;
-use either::Either;
-use mbe::Origin;
-use syntax::{
- ast::{self, HasDocComments},
- AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
-};
-use triomphe::Arc;
-
-use crate::{
- db::{self, ExpandDatabase},
- fixup,
- name::{AsName, Name},
- HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
-};
-
-#[derive(Clone, Debug)]
-pub struct Hygiene {
- frames: Option<HygieneFrames>,
+use std::iter;
+
+use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
+
+use crate::db::ExpandDatabase;
+
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
+pub struct SyntaxContextData {
+ pub outer_expn: Option<MacroCallId>,
+ pub outer_transparency: Transparency,
+ pub parent: SyntaxContextId,
+ /// This context, but with all transparent and semi-transparent expansions filtered away.
+ pub opaque: SyntaxContextId,
+ /// This context, but with all transparent expansions filtered away.
+ pub opaque_and_semitransparent: SyntaxContextId,
}
-impl Hygiene {
- pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
- Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
+impl std::fmt::Debug for SyntaxContextData {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("SyntaxContextData")
+ .field("outer_expn", &self.outer_expn)
+ .field("outer_transparency", &self.outer_transparency)
+ .field("parent", &self.parent)
+ .field("opaque", &self.opaque)
+ .field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
+ .finish()
}
+}
- pub fn new_unhygienic() -> Hygiene {
- Hygiene { frames: None }
+impl SyntaxContextData {
+ pub fn root() -> Self {
+ SyntaxContextData {
+ outer_expn: None,
+ outer_transparency: Transparency::Opaque,
+ parent: SyntaxContextId::ROOT,
+ opaque: SyntaxContextId::ROOT,
+ opaque_and_semitransparent: SyntaxContextId::ROOT,
+ }
}
- // FIXME: this should just return name
- pub fn name_ref_to_name(
- &self,
+ pub fn fancy_debug(
+ self,
+ self_id: SyntaxContextId,
db: &dyn ExpandDatabase,
- name_ref: ast::NameRef,
- ) -> Either<Name, CrateId> {
- if let Some(frames) = &self.frames {
- if name_ref.text() == "$crate" {
- if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
- return Either::Right(krate);
- }
+ f: &mut std::fmt::Formatter<'_>,
+ ) -> std::fmt::Result {
+ write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
+ match self.outer_expn {
+ Some(id) => {
+ write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
}
+ None => write!(f, "root")?,
}
-
- Either::Left(name_ref.as_name())
+ write!(f, ", {:?})", self.outer_transparency)
}
+}
- pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
- let mut token = path.syntax().first_token()?.text_range();
- let frames = self.frames.as_ref()?;
- let mut current = &frames.0;
-
- loop {
- let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
- if origin == Origin::Def {
- return if current.local_inner {
- frames.root_crate(db, path.syntax())
- } else {
- None
- };
- }
- current = current.call_site.as_ref()?;
- token = mapped.value;
- }
- }
+/// A property of a macro expansion that determines how identifiers
+/// produced by that expansion are resolved.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
+pub enum Transparency {
+ /// Identifier produced by a transparent expansion is always resolved at call-site.
+ /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
+ Transparent,
+ /// Identifier produced by a semi-transparent expansion may be resolved
+ /// either at call-site or at definition-site.
+ /// If it's a local variable, label or `$crate` then it's resolved at def-site.
+ /// Otherwise it's resolved at call-site.
+ /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
+ /// but that's an implementation detail.
+ SemiTransparent,
+ /// Identifier produced by an opaque expansion is always resolved at definition-site.
+ /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
+ Opaque,
}
-#[derive(Clone, Debug)]
-struct HygieneFrames(Arc<HygieneFrame>);
+pub fn span_with_def_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
+}
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct HygieneFrame {
- expansion: Option<HygieneInfo>,
+pub fn span_with_call_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
+}
- // Indicate this is a local inner macro
- local_inner: bool,
- krate: Option<CrateId>,
+pub fn span_with_mixed_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
+}
- call_site: Option<Arc<HygieneFrame>>,
- def_site: Option<Arc<HygieneFrame>>,
+fn span_with_ctxt_from_mark(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+ transparency: Transparency,
+) -> SpanData {
+ SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
-impl HygieneFrames {
- fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
- // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
- // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
- HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
+pub(super) fn apply_mark(
+ db: &dyn ExpandDatabase,
+ ctxt: SyntaxContextId,
+ call_id: MacroCallId,
+ transparency: Transparency,
+) -> SyntaxContextId {
+ if transparency == Transparency::Opaque {
+ return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
- fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
- let mut token = node.first_token()?.text_range();
- let mut result = self.0.krate;
- let mut current = self.0.clone();
-
- while let Some((mapped, origin)) =
- current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
- {
- result = current.krate;
-
- let site = match origin {
- Origin::Def => &current.def_site,
- Origin::Call => &current.call_site,
- };
+ let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
+ let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
+ call_site_ctxt.normalize_to_macros_2_0(db)
+ } else {
+ call_site_ctxt.normalize_to_macro_rules(db)
+ };
- let site = match site {
- None => break,
- Some(it) => it,
- };
-
- current = site.clone();
- token = mapped.value;
- }
+ if call_site_ctxt.is_root() {
+ return apply_mark_internal(db, ctxt, Some(call_id), transparency);
+ }
- result
+ // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
+ // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
+ //
+ // In this case, the tokens from the macros 1.0 definition inherit the hygiene
+ // at their invocation. That is, we pretend that the macros 1.0 definition
+ // was defined at its invocation (i.e., inside the macros 2.0 definition)
+ // so that the macros 2.0 definition remains hygienic.
+ //
+ // See the example at `test/ui/hygiene/legacy_interaction.rs`.
+ for (call_id, transparency) in ctxt.marks(db) {
+ call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
+ apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
-#[derive(Debug, Clone, PartialEq, Eq)]
-struct HygieneInfo {
- file: MacroFile,
- /// The start offset of the `macro_rules!` arguments or attribute input.
- attr_input_or_mac_def_start: Option<InFile<TextSize>>,
+fn apply_mark_internal(
+ db: &dyn ExpandDatabase,
+ ctxt: SyntaxContextId,
+ call_id: Option<MacroCallId>,
+ transparency: Transparency,
+) -> SyntaxContextId {
+ let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
+ let mut opaque = syntax_context_data.opaque;
+ let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+
+ if transparency >= Transparency::Opaque {
+ let parent = opaque;
+ let new_opaque = SyntaxContextId::SELF_REF;
+        // But we can't just grab the to-be-allocated ID either, as that would
+        // not deduplicate things! So we need a new salsa store type here ...
+ opaque = db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque: new_opaque,
+ opaque_and_semitransparent: new_opaque,
+ });
+ }
+
+ if transparency >= Transparency::SemiTransparent {
+ let parent = opaque_and_semitransparent;
+ let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
+ opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque,
+ opaque_and_semitransparent: new_opaque_and_semitransparent,
+ });
+ }
- macro_def: TokenExpander,
- macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
- macro_arg_shift: mbe::Shift,
- exp_map: Arc<mbe::TokenMap>,
+ let parent = ctxt;
+ db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque,
+ opaque_and_semitransparent,
+ })
+}
+pub trait SyntaxContextExt {
+ fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
+ fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
+ fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
+ fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+ fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+ fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
-impl HygieneInfo {
- fn map_ident_up(
- &self,
- db: &dyn ExpandDatabase,
- token: TextRange,
- ) -> Option<(InFile<TextRange>, Origin)> {
- let token_id = self.exp_map.token_by_range(token)?;
- let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
- let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
-
- let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def_start?)
- }
- None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
- },
- _ => match origin {
- mbe::Origin::Call => {
- (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
- }
- mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
- (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
- (&expander.def_site_token_map, *tt)
- }
- _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
- },
- },
- };
-
- let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
- Some((tt.with_value(range + tt.value), origin))
+#[inline(always)]
+fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
+ match n {
+ SyntaxContextId::SELF_REF => p,
+ _ => n,
}
}
-fn make_hygiene_info(
- db: &dyn ExpandDatabase,
- macro_file: MacroFile,
- loc: &MacroCallLoc,
-) -> HygieneInfo {
- let def = loc.def.ast_id().left().and_then(|id| {
- let def_tt = match id.to_node(db) {
- ast::Macro::MacroRules(mac) => mac.token_tree()?,
- ast::Macro::MacroDef(mac) => mac.body()?,
- };
- Some(InFile::new(id.file_id, def_tt))
- });
- let attr_input_or_mac_def = def.or_else(|| match loc.kind {
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
- .token_tree()?;
- Some(InFile::new(ast_id.file_id, tt))
- }
- _ => None,
- });
-
- let macro_def = db.macro_expander(loc.def);
- let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
- });
-
- HygieneInfo {
- file: macro_file,
- attr_input_or_mac_def_start: attr_input_or_mac_def
- .map(|it| it.map(|tt| tt.syntax().text_range().start())),
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
- macro_arg,
- macro_def,
- exp_map,
+impl SyntaxContextExt for SyntaxContextId {
+ fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
+ handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
+ }
+ fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
+ handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
+ }
+ fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
+ db.lookup_intern_syntax_context(self).parent
+ }
+ fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+ let data = db.lookup_intern_syntax_context(self);
+ (data.outer_expn, data.outer_transparency)
+ }
+ fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+ let data = db.lookup_intern_syntax_context(*self);
+ *self = data.parent;
+ (data.outer_expn, data.outer_transparency)
+ }
+ fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
+ let mut marks = marks_rev(self, db).collect::<Vec<_>>();
+ marks.reverse();
+ marks
}
}
-impl HygieneFrame {
- pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
- let (info, krate, local_inner) = match file_id.macro_file() {
- None => (None, None, false),
- Some(macro_file) => {
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
- match loc.def.kind {
- MacroDefKind::Declarative(_) => {
- (info, Some(loc.def.krate), loc.def.local_inner)
- }
- MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
- MacroDefKind::BuiltInAttr(..) => (info, None, false),
- MacroDefKind::BuiltInDerive(..) => (info, None, false),
- MacroDefKind::BuiltInEager(..) => (info, None, false),
- MacroDefKind::ProcMacro(..) => (info, None, false),
- }
- }
- };
-
- let Some((info, calling_file)) = info else {
- return HygieneFrame {
- expansion: None,
- local_inner,
- krate,
- call_site: None,
- def_site: None,
- };
- };
-
- let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
- let call_site = Some(db.hygiene_frame(calling_file));
-
- HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
- }
+// FIXME: Make this a SyntaxContextExt method once we have RPIT
+pub fn marks_rev(
+ ctxt: SyntaxContextId,
+ db: &dyn ExpandDatabase,
+) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
+ iter::successors(Some(ctxt), move |&mark| {
+ Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
+ })
+ .map(|ctx| ctx.outer_mark(db))
}
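+
+// --- Illustrative sketch (exposition only; not part of this change). ---
+// Walking a context's expansion history with the helpers above.
+// `collect_marks` is a hypothetical function; it computes the same list as
+// `marks_rev`, just with an explicit loop.
+fn collect_marks(
+    db: &dyn ExpandDatabase,
+    mut ctxt: SyntaxContextId,
+) -> Vec<(Option<MacroCallId>, Transparency)> {
+    let mut marks = Vec::new();
+    // `remove_mark` pops the outermost mark and steps `ctxt` to its parent,
+    // so marks are visited from innermost to outermost.
+    while ctxt != SyntaxContextId::ROOT {
+        marks.push(ctxt.remove_mark(db));
+    }
+    marks
+}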
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 4be55126b..d7819b315 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -4,7 +4,7 @@
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod db;
pub mod ast_id_map;
@@ -18,39 +18,59 @@ pub mod quote;
pub mod eager;
pub mod mod_path;
pub mod attrs;
+pub mod span;
+pub mod files;
mod fixup;
-use mbe::TokenMap;
-pub use mbe::{Origin, ValueResult};
-
-use ::tt::token_id as tt;
+use attrs::collect_attrs;
use triomphe::Arc;
-use std::{fmt, hash::Hash, iter};
+use std::{fmt, hash::Hash};
use base_db::{
- impl_intern_key,
- salsa::{self, InternId},
+ span::{HirFileIdRepr, SpanData, SyntaxContextId},
CrateId, FileId, FileRange, ProcMacroKind,
};
use either::Either;
use syntax::{
- algo::{self, skip_trivia_token},
- ast::{self, AstNode, HasDocComments},
- AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ ast::{self, AstNode},
+ SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use crate::{
- ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::TokenExpander,
+ fixup::SyntaxFixupUndoInfo,
mod_path::ModPath,
proc_macro::ProcMacroExpander,
+ span::{ExpansionSpanMap, SpanMap},
};
+pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId};
+pub use crate::files::{InFile, InMacroFile, InRealFile};
+
+pub use base_db::span::{HirFileId, MacroCallId, MacroFileId};
+pub use mbe::ValueResult;
+
+pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
+
+pub mod tt {
+ pub use base_db::span::SpanData;
+ pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor};
+
+ pub type Delimiter = ::tt::Delimiter<SpanData>;
+ pub type DelimSpan = ::tt::DelimSpan<SpanData>;
+ pub type Subtree = ::tt::Subtree<SpanData>;
+ pub type Leaf = ::tt::Leaf<SpanData>;
+ pub type Literal = ::tt::Literal<SpanData>;
+ pub type Punct = ::tt::Punct<SpanData>;
+ pub type Ident = ::tt::Ident<SpanData>;
+ pub type TokenTree = ::tt::TokenTree<SpanData>;
+}
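+
+// --- Illustrative sketch (exposition only; not part of this change). ---
+// The aliases above fix the span parameter of the generic `::tt` types once,
+// so the rest of the crate can write `tt::Ident` instead of
+// `::tt::Ident<SpanData>`. `dummy_ident` is a hypothetical helper.
+fn dummy_ident(span: tt::SpanData) -> tt::Leaf {
+    tt::Leaf::Ident(tt::Ident { text: "__example".into(), span })
+}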
+
pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -59,6 +79,7 @@ pub enum ExpandError {
Mbe(mbe::ExpandError),
RecursionOverflowPoisoned,
Other(Box<Box<str>>),
+ ProcMacroPanic(Box<Box<str>>),
}
impl ExpandError {
@@ -81,56 +102,24 @@ impl fmt::Display for ExpandError {
ExpandError::RecursionOverflowPoisoned => {
f.write_str("overflow expanding the original macro")
}
+ ExpandError::ProcMacroPanic(it) => {
+ f.write_str("proc-macro panicked: ")?;
+ f.write_str(it)
+ }
ExpandError::Other(it) => f.write_str(it),
}
}
}
-/// Input to the analyzer is a set of files, where each file is identified by
-/// `FileId` and contains source code. However, another source of source code in
-/// Rust are macros: each macro can be thought of as producing a "temporary
-/// file". To assign an id to such a file, we use the id of the macro call that
-/// produced the file. So, a `HirFileId` is either a `FileId` (source code
-/// written by user), or a `MacroCallId` (source code produced by macro).
-///
-/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
-/// containing the call plus the offset of the macro call in the file. Note that
-/// this is a recursive definition! However, the size_of of `HirFileId` is
-/// finite (because everything bottoms out at the real `FileId`) and small
-/// (`MacroCallId` uses the location interning. You can check details here:
-/// <https://en.wikipedia.org/wiki/String_interning>).
-///
-/// The two variants are encoded in a single u32 which are differentiated by the MSB.
-/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
-/// `MacroCallId`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct HirFileId(u32);
-
-impl fmt::Debug for HirFileId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.repr().fmt(f)
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroFile {
- pub macro_call_id: MacroCallId,
-}
-
-/// `MacroCallId` identifies a particular macro invocation, like
-/// `println!("Hello, {}", world)`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroCallId(salsa::InternId);
-impl_intern_key!(MacroCallId);
-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub def: MacroDefId,
- pub(crate) krate: CrateId,
+ pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
- eager: Option<Box<EagerCallInfo>>,
+ eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind,
+ pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -139,6 +128,7 @@ pub struct MacroDefId {
pub kind: MacroDefKind,
pub local_inner: bool,
pub allow_internal_unsafe: bool,
+ // pub def_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -152,9 +142,9 @@ pub enum MacroDefKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-struct EagerCallInfo {
+pub struct EagerCallInfo {
/// The expanded argument of the eager macro.
- arg: Arc<(tt::Subtree, TokenMap)>,
+ arg: Arc<tt::Subtree>,
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
@@ -178,7 +168,7 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
- attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+ attr_args: Option<Arc<tt::Subtree>>,
/// Syntactical index of the invoking `#[attribute]`.
///
/// Outer attributes are counted first, then inner attributes. This does not support
@@ -187,76 +177,68 @@ pub enum MacroCallKind {
},
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-enum HirFileIdRepr {
- FileId(FileId),
- MacroFile(MacroFile),
-}
+pub trait HirFileIdExt {
+ /// Returns the original file of this macro call hierarchy.
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId;
-impl From<FileId> for HirFileId {
- fn from(FileId(id): FileId) -> Self {
- assert!(id < Self::MAX_FILE_ID);
- HirFileId(id)
- }
-}
+    /// Returns the original file of this macro call hierarchy, descending into the included file if
+    /// one of the calls comes from an `include!`.
+ fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId;
-impl From<MacroFile> for HirFileId {
- fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
- let id = id.as_u32();
- assert!(id < Self::MAX_FILE_ID);
- HirFileId(id | Self::MACRO_FILE_TAG_MASK)
- }
-}
+ /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+ fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
+
+ /// Return expansion information if it is a macro-expansion file
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>;
-impl HirFileId {
- const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
- const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+ fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
+ -> Option<InFile<ast::Attr>>;
+}
- /// For macro-expansion files, returns the file original source file the
- /// expansion originated from.
- pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
+impl HirFileIdExt for HirFileId {
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
let mut file_id = self;
loop {
match file_id.repr() {
HirFileIdRepr::FileId(id) => break id,
- HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
- file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
- Some(Ok((_, file))) => file.into(),
- _ => loc.kind.file_id(),
- }
+ HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
+ file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id();
}
}
}
}
- pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
- let mut level = 0;
- let mut curr = self;
- while let Some(macro_file) = curr.macro_file() {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
- level += 1;
- curr = loc.kind.file_id();
+ fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId {
+ loop {
+ match self.repr() {
+ base_db::span::HirFileIdRepr::FileId(id) => break id,
+ base_db::span::HirFileIdRepr::MacroFile(file) => {
+ let loc = db.lookup_intern_macro_call(file.macro_call_id);
+ if loc.def.is_include() {
+ if let Some(eager) = &loc.eager {
+ if let Ok(it) = builtin_fn_macro::include_input_to_file_id(
+ db,
+ file.macro_call_id,
+ &eager.arg,
+ ) {
+ break it;
+ }
+ }
+ }
+ self = loc.kind.file_id();
+ }
+ }
}
- level
}
- /// If this is a macro call, returns the syntax node of the call.
- pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
- let macro_file = self.macro_file()?;
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- Some(loc.to_node(db))
- }
-
- /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
+ fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop {
match call.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
- HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ HirFileIdRepr::FileId(file_id) => {
+ break Some(InRealFile { file_id, value: call.value })
+ }
+ HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
@@ -264,12 +246,11 @@ impl HirFileId {
}
/// Return expansion information if it is a macro-expansion file
- pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
- let macro_file = self.macro_file()?;
- ExpansionInfo::new(db, macro_file)
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
+ Some(ExpansionInfo::new(db, self.macro_file()?))
}
- pub fn as_builtin_derive_attr_node(
+ fn as_builtin_derive_attr_node(
&self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
@@ -281,104 +262,84 @@ impl HirFileId {
};
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
+}
- pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- matches!(
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
- MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
- )
- }
- None => false,
- }
- }
+pub trait MacroFileIdExt {
+ fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
+    /// Returns the syntax node of the macro call.
+ fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode>;
- pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- matches!(
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
- MacroDefKind::BuiltInDerive(..)
- )
- }
- None => false,
- }
- }
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo;
+
+ fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+ fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an include macro
- pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
- }
- _ => false,
- }
+ fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+ fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
+ /// Return whether this file is an attr macro
+ fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+ /// Return whether this file is the pseudo expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
+}
+
+impl MacroFileIdExt for MacroFileId {
+ fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
+ db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
}
+ fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
+ let mut level = 0;
+ let mut macro_file = self;
+ loop {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
- }
- _ => false,
+ level += 1;
+ macro_file = match loc.kind.file_id().repr() {
+ HirFileIdRepr::FileId(_) => break level,
+ HirFileIdRepr::MacroFile(it) => it,
+ };
}
}
- /// Return whether this file is an attr macro
- pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.kind, MacroCallKind::Attr { .. })
- }
- _ => false,
- }
+    /// Return the expansion information for this macro-expansion file.
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo {
+ ExpansionInfo::new(db, self)
}
- /// Return whether this file is the pseudo expansion of the derive attribute.
- /// See [`crate::builtin_attr_macro::derive_attr_expand`].
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- loc.def.is_attribute_derive()
- }
- None => false,
- }
+ fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
}
- #[inline]
- pub fn is_macro(self) -> bool {
- self.0 & Self::MACRO_FILE_TAG_MASK != 0
+ fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::BuiltInDerive(..)
+ )
}
- #[inline]
- pub fn macro_file(self) -> Option<MacroFile> {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => None,
- _ => Some(MacroFile {
- macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
- }),
- }
+ fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+ db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
}
- #[inline]
- pub fn file_id(self) -> Option<FileId> {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => Some(FileId(self.0)),
- _ => None,
- }
+ fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
- fn repr(self) -> HirFileIdRepr {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => HirFileIdRepr::FileId(FileId(self.0)),
- _ => HirFileIdRepr::MacroFile(MacroFile {
- macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
- }),
- }
+ fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { .. })
+ }
+
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ loc.def.is_attribute_derive()
}
}
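+
+// --- Illustrative sketch (exposition only; not part of this change). ---
+// With the split into `HirFileIdExt` and `MacroFileIdExt`, callers first
+// refine a `HirFileId` into a `MacroFileId` and only then ask macro-specific
+// questions. `describe` is a hypothetical helper.
+fn describe(db: &dyn db::ExpandDatabase, file_id: HirFileId) -> String {
+    match file_id.macro_file() {
+        // Real files have no expansion level.
+        None => "real file".to_owned(),
+        Some(mac) => format!("macro expansion, {} level(s) deep", mac.expansion_level(db)),
+    }
+}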
@@ -388,20 +349,35 @@ impl MacroDefId {
db: &dyn db::ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
+ call_site: SyntaxContextId,
) -> MacroCallId {
- db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+ db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
+ }
+
+ pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile<TextRange> {
+ match self.kind {
+ MacroDefKind::Declarative(id)
+ | MacroDefKind::BuiltIn(_, id)
+ | MacroDefKind::BuiltInAttr(_, id)
+ | MacroDefKind::BuiltInDerive(_, id)
+ | MacroDefKind::BuiltInEager(_, id) => {
+ id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
+ }
+ MacroDefKind::ProcMacro(_, _, id) => {
+ id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
+ }
+ }
}
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
- let id = match self.kind {
+ match self.kind {
MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id)
| MacroDefKind::BuiltInAttr(_, id)
| MacroDefKind::BuiltInDerive(_, id)
- | MacroDefKind::BuiltInEager(_, id) => id,
- };
- Either::Left(id)
+ | MacroDefKind::BuiltInEager(_, id) => Either::Left(id),
+ }
}
pub fn is_proc_macro(&self) -> bool {
@@ -443,6 +419,18 @@ impl MacroDefId {
}
impl MacroCallLoc {
+ pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData {
+ let ast_id = self.kind.erased_ast_id();
+ let file_id = self.kind.file_id();
+ let range = db.ast_id_map(file_id).get_erased(ast_id).text_range();
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range),
+ HirFileIdRepr::MacroFile(m) => {
+ db.parse_macro_expansion(m).value.1.span_at(range.start())
+ }
+ }
+ }
+
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
match self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
@@ -451,9 +439,9 @@ impl MacroCallLoc {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(derive_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -464,9 +452,9 @@ impl MacroCallLoc {
if self.def.is_attribute_derive() {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(invoc_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -483,20 +471,26 @@ impl MacroCallLoc {
match self.kind {
MacroCallKind::FnLike { expand_to, .. } => expand_to,
MacroCallKind::Derive { .. } => ExpandTo::Items,
- MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Items,
MacroCallKind::Attr { .. } => {
- // is this always correct?
+ // FIXME(stmt_expr_attributes)
ExpandTo::Items
}
}
}
}
-// FIXME: attribute indices do not account for nested `cfg_attr`
-
impl MacroCallKind {
+ fn descr(&self) -> &'static str {
+ match self {
+ MacroCallKind::FnLike { .. } => "macro call",
+ MacroCallKind::Derive { .. } => "derive macro",
+ MacroCallKind::Attr { .. } => "attribute macro",
+ }
+ }
+
/// Returns the file containing the macro invocation.
- fn file_id(&self) -> HirFileId {
+ pub fn file_id(&self) -> HirFileId {
match *self {
MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
| MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
@@ -504,6 +498,14 @@ impl MacroCallKind {
}
}
+ fn erased_ast_id(&self) -> ErasedFileAstId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(),
+ MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(),
+ MacroCallKind::Attr { ast_id: InFile { value, .. }, .. } => value.erase(),
+ }
+ }
+
/// Returns the original file range that best describes the location of this macro call.
///
/// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
@@ -548,242 +550,179 @@ impl MacroCallKind {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
- ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ collect_attrs(&ast_id.to_node(db))
.nth(derive_attr_index.ast_index())
.expect("missing derive")
+ .1
.expect_left("derive is a doc comment?")
.syntax()
.text_range()
}
// FIXME: handle `cfg_attr`
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .expect("missing attribute")
- .expect_left("attribute macro is a doc comment?")
- .syntax()
- .text_range(),
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ collect_attrs(&ast_id.to_node(db))
+ .nth(invoc_attr_index.ast_index())
+ .expect("missing attribute")
+ .1
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range()
+ }
};
FileRange { range, file_id }
}
- fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
+ fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile<Option<SyntaxNode>> {
match self {
- MacroCallKind::FnLike { ast_id, .. } => ast_id
- .to_in_file_node(db)
- .map(|it| Some(it.token_tree()?.syntax().clone()))
- .transpose(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone()))
+ }
MacroCallKind::Derive { ast_id, .. } => {
- Some(ast_id.to_in_file_node(db).syntax().cloned())
+ ast_id.to_in_file_node(db).syntax().cloned().map(Some)
}
MacroCallKind::Attr { ast_id, .. } => {
- Some(ast_id.to_in_file_node(db).syntax().cloned())
+ ast_id.to_in_file_node(db).syntax().cloned().map(Some)
}
}
}
}
-impl MacroCallId {
- pub fn as_file(self) -> HirFileId {
- MacroFile { macro_call_id: self }.into()
- }
-
- pub fn as_macro_file(self) -> MacroFile {
- MacroFile { macro_call_id: self }
- }
-}
-
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+// FIXME: can be expensive to create, we should check the use sites and maybe replace them with
+// simpler function calls if the map is only used once
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
- expanded: InMacroFile<SyntaxNode>,
+ pub expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
- arg: InFile<SyntaxNode>,
+ arg: InFile<Option<SyntaxNode>>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: TokenExpander,
- macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
- /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
- /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
- macro_arg_shift: mbe::Shift,
- exp_map: Arc<mbe::TokenMap>,
+ macro_arg: Arc<tt::Subtree>,
+ pub exp_map: Arc<ExpansionSpanMap>,
+ arg_map: SpanMap,
}
impl ExpansionInfo {
- pub fn expanded(&self) -> InFile<SyntaxNode> {
- self.expanded.clone().into()
+ pub fn expanded(&self) -> InMacroFile<SyntaxNode> {
+ self.expanded.clone()
}
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
- Some(self.arg.with_value(self.arg.value.parent()?))
+ Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
}
- /// Map a token down from macro input into the macro expansion.
- ///
- /// The inner workings of this function differ slightly depending on the type of macro we are dealing with:
- /// - declarative:
- /// For declarative macros, we need to accommodate for the macro definition site(which acts as a second unchanging input)
- /// , as tokens can mapped in and out of it.
- /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy
- /// way to map all the tokens.
- /// - attribute:
- /// Attributes have two different inputs, the input tokentree in the attribute node and the item
- /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here
- /// as well. Currently this is done by shifting the attribute input by the maximum id of the item.
- /// - function-like and derives:
- /// Both of these only have one simple call site input so no special handling is required here.
- pub fn map_token_down(
- &self,
- db: &dyn db::ExpandDatabase,
- item: Option<ast::Item>,
- token: InFile<&SyntaxToken>,
- // FIXME: use this for range mapping, so that we can resolve inline format args
- _relative_token_offset: Option<TextSize>,
- ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
- assert_eq!(token.file_id, self.arg.file_id);
- let token_id_in_attr_input = if let Some(item) = item {
- // check if we are mapping down in an attribute input
- // this is a special case as attributes can have two inputs
- let call_id = self.expanded.file_id.macro_call_id;
- let loc = db.lookup_intern_macro_call(call_id);
-
- let token_range = token.value.text_range();
- match &loc.kind {
- MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- let attr = item
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?;
- match attr.token_tree() {
- Some(token_tree)
- if token_tree.syntax().text_range().contains_range(token_range) =>
- {
- let attr_input_start =
- token_tree.left_delimiter_token()?.text_range().start();
- let relative_range =
- token.value.text_range().checked_sub(attr_input_start)?;
- // shift by the item's tree's max id
- let token_id = attr_args.1.token_by_range(relative_range)?;
-
- let token_id = if loc.def.is_attribute_derive() {
- // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
- token_id
- } else {
- self.macro_arg_shift.shift(token_id)
- };
- Some(token_id)
- }
- _ => None,
- }
- }
- _ => None,
- }
- } else {
- None
- };
-
- let token_id = match token_id_in_attr_input {
- Some(token_id) => token_id,
- // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual
- None => {
- let relative_range =
- token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
- let token_id = self.macro_arg.1.token_by_range(relative_range)?;
- // conditionally shift the id by a declarative macro definition
- self.macro_def.map_id_down(token_id)
- }
- };
-
+ /// Maps the given span down into this macro expansion, yielding the expansion tokens produced from it.
+ pub fn map_range_down<'a>(
+ &'a self,
+ span: SpanData,
+ ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
let tokens = self
.exp_map
- .ranges_by_token(token_id, token.value.kind())
+ .ranges_with_span(span)
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
- Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
+ Some(InMacroFile::new(self.expanded.file_id, tokens))
}
- /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
- pub fn map_token_up(
+ /// Looks up the span at the given offset.
+ pub fn span_for_offset(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ offset: TextSize,
+ ) -> (FileRange, SyntaxContextId) {
+ debug_assert!(self.expanded.value.text_range().contains(offset));
+ let span = self.exp_map.span_at(offset);
+ let anchor_offset = db
+ .ast_id_map(span.anchor.file_id.into())
+ .get_erased(span.anchor.ast_id)
+ .text_range()
+ .start();
+ (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+ }
+
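
The anchor arithmetic used in `span_for_offset` (and again in `map_node_range_up` below) can be shown with made-up numbers; this is a sketch of the idea, not real API:

fn main() {
    // Spans store ranges relative to an anchor AST node in the original file.
    let anchor_offset = 100; // the anchor item starts at offset 100
    let span_range = (4, 9); // range stored in the span, relative to the anchor
    // Recover the absolute file range by adding the anchor's start offset.
    let absolute = (span_range.0 + anchor_offset, span_range.1 + anchor_offset);
    assert_eq!(absolute, (104, 109));
}
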
+ /// Maps the text range up and out of the expansion hierarchy, back into the original file it came from.
+ pub fn map_node_range_up(
&self,
db: &dyn db::ExpandDatabase,
- token: InFile<&SyntaxToken>,
- ) -> Option<(InFile<SyntaxToken>, Origin)> {
- assert_eq!(token.file_id, self.expanded.file_id.into());
- // Fetch the id through its text range,
- let token_id = self.exp_map.token_by_range(token.value.text_range())?;
- // conditionally unshifting the id to accommodate for macro-rules def site
- let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
- let call_id = self.expanded.file_id.macro_call_id;
- let loc = db.lookup_intern_macro_call(call_id);
-
- // Special case: map tokens from `include!` expansions to the included file
- if loc.def.is_include() {
- if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
- let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
- let source = db.parse(file_id);
-
- let token = source.syntax_node().covering_element(range).into_token()?;
-
- return Some((InFile::new(file_id.into(), token), Origin::Call));
+ range: TextRange,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ debug_assert!(self.expanded.value.text_range().contains_range(range));
+ let mut spans = self.exp_map.spans_for_range(range);
+ let SpanData { range, anchor, ctx } = spans.next()?;
+ let mut start = range.start();
+ let mut end = range.end();
+
+ for span in spans {
+ if span.anchor != anchor || span.ctx != ctx {
+ return None;
}
+ start = start.min(span.range.start());
+ end = end.max(span.range.end());
}
-
- // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
- let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => {
- if loc.def.is_attribute_derive() {
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- } else {
- // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input
- // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
- match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- }
- None => (&self.macro_arg.1, self.arg.clone()),
- }
- }
- }
- _ => match origin {
- mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
- mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
- (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
- (&expander.def_site_token_map, tt.syntax().cloned())
- }
- _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
- },
+ let anchor_offset =
+ db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some((
+ FileRange {
+ file_id: anchor.file_id,
+ range: TextRange::new(start, end) + anchor_offset,
},
- };
+ ctx,
+ ))
+ }
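
A minimal model of the merging rule above, with `(anchor, start, end)` tuples standing in for real `SpanData` (the context check is elided for brevity): upmapping a node range only succeeds if every span under it points at the same anchor, and the result covers them all.

fn merge(spans: &[(u32, u32, u32)]) -> Option<(u32, u32, u32)> {
    let (&(anchor, mut start, mut end), rest) = spans.split_first()?;
    for &(a, s, e) in rest {
        if a != anchor {
            return None; // pieces come from different anchors: not mappable
        }
        start = start.min(s);
        end = end.max(e);
    }
    Some((anchor, start, end))
}

fn main() {
    // Two spans under the same anchor merge into one covering range.
    assert_eq!(merge(&[(1, 10, 14), (1, 4, 8)]), Some((1, 4, 14)));
    // Spans from different anchors cannot be mapped up as one range.
    assert_eq!(merge(&[(1, 10, 14), (2, 4, 8)]), None);
}
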
- let range = token_map.first_range_by_token(token_id, token.value.kind())?;
- let token =
- tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
- Some((tt.with_value(token), origin))
+ /// Maps the text range up and out of this expansion, into its macro call.
+ pub fn map_range_up_once(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ token: TextRange,
+ ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
+ debug_assert!(self.expanded.value.text_range().contains_range(token));
+ let span = self.exp_map.span_at(token.start());
+ match &self.arg_map {
+ SpanMap::RealSpanMap(_) => {
+ let file_id = span.anchor.file_id.into();
+ let anchor_offset =
+ db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
+ InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
+ }
+ SpanMap::ExpansionSpanMap(arg_map) => {
+ let arg_range = self
+ .arg
+ .value
+ .as_ref()
+ .map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range());
+ InFile::new(
+ self.arg.file_id,
+ arg_map
+ .ranges_with_span(span)
+ .filter(|range| range.intersect(arg_range).is_some())
+ .collect(),
+ )
+ }
+ }
}
- fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+ pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let arg_tt = loc.kind.arg(db)?;
+ let arg_tt = loc.kind.arg(db);
+ let arg_map = db.span_map(arg_tt.file_id);
let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
- let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
+ let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+ (
+ Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: Vec::new(),
+ }),
+ SyntaxFixupUndoInfo::NONE,
+ )
});
let def = loc.def.ast_id().left().and_then(|id| {
@@ -799,342 +738,27 @@ impl ExpansionInfo {
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ let tt = collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
+ .and_then(|x| Either::left(x.1))?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
- Some(ExpansionInfo {
+ ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
- })
- }
-}
-
-/// `AstId` points to an AST node in any file.
-///
-/// It is stable across reparses, and can be used as salsa key/value.
-pub type AstId<N> = InFile<FileAstId<N>>;
-
-impl<N: AstIdNode> AstId<N> {
- pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
- self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
- }
- pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
- InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
- }
- pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
- db.ast_id_map(self.file_id).get(self.value)
- }
-}
-
-pub type ErasedAstId = InFile<ErasedFileAstId>;
-
-impl ErasedAstId {
- pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
- db.ast_id_map(self.file_id).get_raw(self.value)
- }
-}
-
-/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
-///
-/// Typical usages are:
-///
-/// * `InFile<SyntaxNode>` -- syntax node in a file
-/// * `InFile<ast::FnDef>` -- ast node in a file
-/// * `InFile<TextSize>` -- offset in a file
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InFile<T> {
- pub file_id: HirFileId,
- pub value: T,
-}
-
-impl<T> InFile<T> {
- pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
- InFile { file_id, value }
- }
-
- pub fn with_value<U>(&self, value: U) -> InFile<U> {
- InFile::new(self.file_id, value)
- }
-
- pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
- InFile::new(self.file_id, f(self.value))
- }
-
- pub fn as_ref(&self) -> InFile<&T> {
- self.with_value(&self.value)
- }
-
- pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
- db.parse_or_expand(self.file_id)
- }
-}
-
-impl<T: Clone> InFile<&T> {
- pub fn cloned(&self) -> InFile<T> {
- self.with_value(self.value.clone())
- }
-}
-
-impl<T> InFile<Option<T>> {
- pub fn transpose(self) -> Option<InFile<T>> {
- let value = self.value?;
- Some(InFile::new(self.file_id, value))
- }
-}
-
-impl<L, R> InFile<Either<L, R>> {
- pub fn transpose(self) -> Either<InFile<L>, InFile<R>> {
- match self.value {
- Either::Left(l) => Either::Left(InFile::new(self.file_id, l)),
- Either::Right(r) => Either::Right(InFile::new(self.file_id, r)),
+ arg_map,
}
}
}
-impl InFile<&SyntaxNode> {
- pub fn ancestors_with_macros(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
- iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => node.file_id.call_node(db),
- })
- }
-
- /// Skips the attributed item that caused the macro invocation we are climbing up
- pub fn ancestors_with_macros_skip_attr_item(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
- let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => {
- let parent_node = node.file_id.call_node(db)?;
- if node.file_id.is_attr_macro(db) {
- // macro call was an attributed item, skip it
- // FIXME: does this fail if this is a direct expansion of another macro?
- parent_node.map(|node| node.parent()).transpose()
- } else {
- Some(parent_node)
- }
- }
- };
- iter::successors(succ(&self.cloned()), succ)
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- ///
- /// For attributes and derives, this will point back to the attribute only.
- /// For the entire item use [`InFile::original_file_range_full`].
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range(db)
- }
- }
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range_with_body(db)
- }
- }
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match ascend_node_border_tokens(db, self) {
- Some(InFile { file_id, value: (first, last) }) => {
- let original_file = file_id.original_file(db);
- let range = first.text_range().cover(last.text_range());
- if file_id != original_file.into() {
- tracing::error!("Failed mapping up more for {:?}", range);
- return None;
- }
- Some(FileRange { file_id: original_file, range })
- }
- _ if !self.file_id.is_macro() => Some(FileRange {
- file_id: self.file_id.original_file(db),
- range: self.value.text_range(),
- }),
- _ => None,
- }
- }
-
- pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
- // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- if !self.file_id.is_macro() {
- return Some(self.map(Clone::clone));
- } else if !self.file_id.is_attr_macro(db) {
- return None;
- }
-
- if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self)
- {
- if file_id.is_macro() {
- let range = first.text_range().cover(last.text_range());
- tracing::error!("Failed mapping out of macro file for {:?}", range);
- return None;
- }
- // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
- let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
- let kind = self.value.kind();
- let value = anc.ancestors().find(|it| it.kind() == kind)?;
- return Some(InFile::new(file_id, value));
- }
- None
- }
-}
-
-impl InFile<SyntaxToken> {
- pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
- let expansion = self.file_id.expansion_info(db)?;
- expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range(db)
- }
- }
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
- Some(FileRange { file_id, range: self.value.text_range() })
- }
- HirFileIdRepr::MacroFile(_) => {
- let expansion = self.file_id.expansion_info(db)?;
- let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?;
- let original_file = file_id.original_file(db);
- if file_id != original_file.into() {
- return None;
- }
- Some(FileRange { file_id: original_file, range: value.text_range() })
- }
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InMacroFile<T> {
- pub file_id: MacroFile,
- pub value: T,
-}
-
-impl<T> From<InMacroFile<T>> for InFile<T> {
- fn from(macro_file: InMacroFile<T>) -> Self {
- InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
- }
-}
-
-fn ascend_node_border_tokens(
- db: &dyn db::ExpandDatabase,
- InFile { file_id, value: node }: InFile<&SyntaxNode>,
-) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
- let expansion = file_id.expansion_info(db)?;
-
- let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
- let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
-
- // FIXME: Once the token map rewrite is done, this shouldnt need to rely on syntax nodes and tokens anymore
- let first = first_token(node)?;
- let last = last_token(node)?;
- let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
- let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
- (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
-}
-
-fn ascend_call_token(
- db: &dyn db::ExpandDatabase,
- expansion: &ExpansionInfo,
- token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
- let mut mapping = expansion.map_token_up(db, token.as_ref())?;
- while let (mapped, Origin::Call) = mapping {
- match mapped.file_id.expansion_info(db) {
- Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
- None => return Some(mapped),
- }
- }
- None
-}
-
-impl<N: AstNode> InFile<N> {
- pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
- self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
- }
-
- // FIXME: this should return `Option<InFileNotHirFile<N>>`
- pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
- // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- if !self.file_id.is_macro() {
- return Some(self);
- } else if !self.file_id.is_attr_macro(db) {
- return None;
- }
-
- if let Some(InFile { file_id, value: (first, last) }) =
- ascend_node_border_tokens(db, self.syntax())
- {
- if file_id.is_macro() {
- let range = first.text_range().cover(last.text_range());
- tracing::error!("Failed mapping out of macro file for {:?}", range);
- return None;
- }
- // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
- let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
- let value = anc.ancestors().find_map(N::cast)?;
- return Some(InFile::new(file_id, value));
- }
- None
- }
-
- pub fn syntax(&self) -> InFile<&SyntaxNode> {
- self.with_value(self.value.syntax())
- }
-}
-
/// In Rust, macros expand token trees to token trees. When we want to turn a
/// token tree into an AST node, we need to figure out what kind of AST node we
/// want: something like `foo` can be a type, an expression, or a pattern.
@@ -1199,9 +823,4 @@ impl ExpandTo {
}
}
-#[derive(Debug)]
-pub struct UnresolvedMacro {
- pub path: ModPath,
-}
-
intern::impl_internable!(ModPath, attrs::AttrInput);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 69aa09c4a..9534b5039 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -7,11 +7,11 @@ use std::{
use crate::{
db::ExpandDatabase,
- hygiene::Hygiene,
- name::{known, Name},
+ hygiene::{marks_rev, SyntaxContextExt, Transparency},
+ name::{known, AsName, Name},
+ span::SpanMapRef,
};
-use base_db::CrateId;
-use either::Either;
+use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};
@@ -38,6 +38,7 @@ pub enum PathKind {
Crate,
/// Absolute path (::foo)
Abs,
+ // FIXME: Remove this
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
@@ -46,9 +47,9 @@ impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
- convert_path(db, None, path, hygiene)
+ convert_path(db, None, path, span_map)
}
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@@ -193,33 +194,36 @@ fn convert_path(
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
- Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+ Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
None => prefix,
};
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
- match hygiene.name_ref_to_name(db, name_ref) {
- Either::Left(name) => {
- // no type args in use
- let mut res = prefix.unwrap_or_else(|| {
- ModPath::from_kind(
- segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
- )
- });
- res.segments.push(name);
- res
- }
- Either::Right(crate_id) => {
- return Some(ModPath::from_segments(
- PathKind::DollarCrate(crate_id),
- iter::empty(),
- ))
+ if name_ref.text() == "$crate" {
+ if prefix.is_some() {
+ return None;
}
+ ModPath::from_kind(
+ resolve_crate_root(
+ db,
+ span_map.span_for_range(name_ref.syntax().text_range()).ctx,
+ )
+ .map(PathKind::DollarCrate)
+ .unwrap_or(PathKind::Crate),
+ )
+ } else {
+ let mut res = prefix.unwrap_or_else(|| {
+ ModPath::from_kind(
+ segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+ )
+ });
+ res.segments.push(name_ref.as_name());
+ res
}
}
ast::PathSegmentKind::SelfTypeKw => {
@@ -261,8 +265,14 @@ fn convert_path(
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
- if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
- mod_path.kind = PathKind::DollarCrate(crate_id);
+ let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
+ if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
+ if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ mod_path.kind = match resolve_crate_root(db, syn_ctx) {
+ Some(crate_root) => PathKind::DollarCrate(crate_root),
+ None => PathKind::Crate,
+ }
+ }
}
}
}
@@ -270,6 +280,29 @@ fn convert_path(
Some(mod_path)
}
+pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
+ // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
+ // we don't want to pretend that the `macro_rules!` definition is in the `macro`
+ // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+ // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
+ // definitions actually produced by `macro` and `macro` definitions produced by
+ // `macro_rules!`, but at least such configurations are not stable yet.
+ ctxt = ctxt.normalize_to_macro_rules(db);
+ let mut iter = marks_rev(ctxt, db).peekable();
+ let mut result_mark = None;
+ // Find the last opaque mark from the end if it exists.
+ while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
+ result_mark = Some(mark);
+ iter.next();
+ }
+ // Then find the last semi-transparent mark from the end if it exists.
+ while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
+ result_mark = Some(mark);
+ }
+
+ result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
+}
+
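
A self-contained sketch of the mark scan above, with plain `(mark, transparency)` pairs already in reverse order standing in for `marks_rev` (toy types, not the real hygiene data): the trailing run of opaque marks is scanned first, then the adjacent run of semi-transparent marks takes precedence.

#[derive(Clone, Copy)]
enum Transparency {
    Transparent,
    SemiTransparent,
    Opaque,
}

fn pick_mark(marks_rev: &[(u32, Transparency)]) -> Option<u32> {
    let mut iter = marks_rev.iter().copied().peekable();
    let mut result_mark = None;
    // Find the last opaque mark from the end if it exists.
    while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
        result_mark = Some(mark);
        iter.next();
    }
    // Then find the last semi-transparent mark from the end if it exists.
    while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
        result_mark = Some(mark);
    }
    result_mark
}

fn main() {
    use Transparency::*;
    // Reversed mark chain: an opaque (`macro`-style) mark at the end, then a
    // semi-transparent (`macro_rules!`-style) mark, which takes precedence.
    assert_eq!(pick_mark(&[(3, Opaque), (2, SemiTransparent), (1, Transparent)]), Some(2));
}
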
pub use crate::name as __name;
#[macro_export]
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index a876f48bd..a321f94cd 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -470,6 +470,7 @@ pub mod known {
pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+ pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");
#[macro_export]
macro_rules! name {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index 41675c630..de5777968 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -1,6 +1,6 @@
//! Proc Macro Expander stub
-use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@@ -33,11 +33,15 @@ impl ProcMacroExpander {
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
- ProcMacroId(DUMMY_ID) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
- }
+ ProcMacroId(DUMMY_ID) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+ ExpandError::UnresolvedProcMacro(def_crate),
+ ),
ProcMacroId(id) => {
let proc_macros = db.proc_macros();
let proc_macros = match proc_macros.get(&def_crate) {
@@ -45,7 +49,7 @@ impl ProcMacroExpander {
Some(Err(_)) | None => {
never!("Non-dummy expander even though there are no proc macros");
return ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@@ -59,7 +63,7 @@ impl ProcMacroExpander {
id
);
return ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@@ -68,7 +72,8 @@ impl ProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
- match proc_macro.expander.expand(tt, attr_arg, env) {
+ match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
+ {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
@@ -78,9 +83,10 @@ impl ProcMacroExpander {
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
- | ProcMacroExpansionError::Panic(text) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
- }
+ | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+ ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())),
+ ),
},
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
index ab3809abc..acbde26c8 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -1,5 +1,7 @@
//! A simplified version of the `quote` crate's quasi-quoting macro
+use base_db::span::SpanData;
+
// A helper macro for the `quote!` macro below
// FIXME:
// 1. Not all puncts are handled
@@ -8,109 +10,109 @@
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
- () => {
+ ($span:ident) => {
Vec::<crate::tt::TokenTree>::new()
};
- ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+ ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
- let children = $crate::__quote!($($tt)*);
+ let children = $crate::__quote!($span $($tt)*);
crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
- open: crate::tt::TokenId::unspecified(),
- close: crate::tt::TokenId::unspecified(),
+ open: $span,
+ close: $span,
},
token_trees: $crate::quote::IntoTt::to_tokens(children),
}
}
};
- ( @PUNCT $first:literal ) => {
+ ( @PUNCT($span:ident) $first:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
]
}
};
- ( @PUNCT $first:literal, $sec:literal ) => {
+ ( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
]
}
};
// hash variable
- ( # $first:ident $($tail:tt)* ) => {
+ ($span:ident # $first:ident $($tail:tt)* ) => {
{
- let token = $crate::quote::ToTokenTree::to_token($first);
+ let token = $crate::quote::ToTokenTree::to_token($first, $span);
let mut tokens = vec![token.into()];
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
- ( ## $first:ident $($tail:tt)* ) => {
+ ($span:ident ## $first:ident $($tail:tt)* ) => {
{
- let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
// Brace
- ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+ ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
// Bracket
- ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+ ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
// Parenthesis
- ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+ ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
// Literal
- ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+ ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
// Ident
- ( $tt:ident ) => {
+ ($span:ident $tt:ident ) => {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
text: stringify!($tt).into(),
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
}]
};
// Puncts
// FIXME: Not all puncts are handled
- ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
- ( & ) => {$crate::__quote!(@PUNCT '&')};
- ( , ) => {$crate::__quote!(@PUNCT ',')};
- ( : ) => {$crate::__quote!(@PUNCT ':')};
- ( ; ) => {$crate::__quote!(@PUNCT ';')};
- ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
- ( . ) => {$crate::__quote!(@PUNCT '.')};
- ( < ) => {$crate::__quote!(@PUNCT '<')};
- ( > ) => {$crate::__quote!(@PUNCT '>')};
- ( ! ) => {$crate::__quote!(@PUNCT '!')};
-
- ( $first:tt $($tail:tt)+ ) => {
+ ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
+ ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
+ ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
+ ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
+ ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
+ ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
+ ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
+ ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
+ ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
+ ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};
+
+ ($span:ident $first:tt $($tail:tt)+ ) => {
{
- let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
@@ -122,19 +124,22 @@ macro_rules! __quote {
/// It should probably be implemented as a proc-macro
#[macro_export]
macro_rules! quote {
- ( $($tt:tt)* ) => {
- $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+ ($span:ident=> $($tt:tt)* ) => {
+ $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
}
}
pub(crate) trait IntoTt {
- fn to_subtree(self) -> crate::tt::Subtree;
+ fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}
impl IntoTt for Vec<crate::tt::TokenTree> {
- fn to_subtree(self) -> crate::tt::Subtree {
- crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
+ fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
+ crate::tt::Subtree {
+ delimiter: crate::tt::Delimiter::invisible_spanned(span),
+ token_trees: self,
+ }
}
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
}
impl IntoTt for crate::tt::Subtree {
- fn to_subtree(self) -> crate::tt::Subtree {
+ fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
self
}
@@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
}
pub(crate) trait ToTokenTree {
- fn to_token(self) -> crate::tt::TokenTree;
+ fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
}
impl ToTokenTree for crate::tt::TokenTree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self
}
}
impl ToTokenTree for &crate::tt::TokenTree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.clone()
}
}
impl ToTokenTree for crate::tt::Subtree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.into()
}
}
macro_rules! impl_to_to_tokentrees {
- ($($ty:ty => $this:ident $im:block);*) => {
+ ($($span:ident: $ty:ty => $this:ident $im:block);*) => {
$(
impl ToTokenTree for $ty {
- fn to_token($this) -> crate::tt::TokenTree {
+ fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.into();
leaf.into()
}
}
impl ToTokenTree for &$ty {
- fn to_token($this) -> crate::tt::TokenTree {
+ fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.clone().into();
leaf.into()
}
@@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
- u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- crate::tt::Leaf => self { self };
- crate::tt::Literal => self { self };
- crate::tt::Ident => self { self };
- crate::tt::Punct => self { self };
- &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
- String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
+ span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
+ _span: crate::tt::Leaf => self { self };
+ _span: crate::tt::Literal => self { self };
+ _span: crate::tt::Ident => self { self };
+ _span: crate::tt::Punct => self { self };
+ span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
+ span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
}
#[cfg(test)]
mod tests {
+ use crate::tt;
+ use base_db::{
+ span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+ };
+ use expect_test::expect;
+ use syntax::{TextRange, TextSize};
+
+ const DUMMY: tt::SpanData = tt::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+
#[test]
fn test_quote_delimiters() {
- assert_eq!(quote!({}).to_string(), "{}");
- assert_eq!(quote!(()).to_string(), "()");
- assert_eq!(quote!([]).to_string(), "[]");
+ assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
+ assert_eq!(quote!(DUMMY =>()).to_string(), "()");
+ assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
}
#[test]
fn test_quote_idents() {
- assert_eq!(quote!(32).to_string(), "32");
- assert_eq!(quote!(struct).to_string(), "struct");
+ assert_eq!(quote!(DUMMY =>32).to_string(), "32");
+ assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
}
#[test]
fn test_quote_hash_simple_literal() {
let a = 20;
- assert_eq!(quote!(#a).to_string(), "20");
+ assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
let s: String = "hello".into();
- assert_eq!(quote!(#s).to_string(), "\"hello\"");
+ assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
}
fn mk_ident(name: &str) -> crate::tt::Ident {
- crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
+ crate::tt::Ident { text: name.into(), span: DUMMY }
}
#[test]
fn test_quote_hash_token_tree() {
let a = mk_ident("hello");
- let quoted = quote!(#a);
+ let quoted = quote!(DUMMY =>#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
- assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295");
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
+ IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
}
#[test]
fn test_quote_simple_derive_copy() {
let name = mk_ident("Foo");
- let quoted = quote! {
+ let quoted = quote! {DUMMY =>
impl Clone for #name {
fn clone(&self) -> Self {
Self {}
@@ -268,18 +289,19 @@ mod tests {
// }
let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")];
- let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
+ let fields =
+ fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);
let list = crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
- open: crate::tt::TokenId::unspecified(),
- close: crate::tt::TokenId::unspecified(),
+ open: DUMMY,
+ close: DUMMY,
},
token_trees: fields.collect(),
};
- let quoted = quote! {
+ let quoted = quote! {DUMMY =>
impl Clone for #struct_name {
fn clone(&self) -> Self {
Self #list
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs
new file mode 100644
index 000000000..fe476a40f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs
@@ -0,0 +1,124 @@
+//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
+//! as associating spans with text ranges in a particular file.
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+};
+use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
+use triomphe::Arc;
+
+use crate::db::ExpandDatabase;
+
+pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
+
+/// Spanmap for a macro file or a real file
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SpanMap {
+ /// Spanmap for a macro file
+ ExpansionSpanMap(Arc<ExpansionSpanMap>),
+ /// Spanmap for a real file
+ RealSpanMap(Arc<RealSpanMap>),
+}
+
+#[derive(Copy, Clone)]
+pub enum SpanMapRef<'a> {
+ /// Spanmap for a macro file
+ ExpansionSpanMap(&'a ExpansionSpanMap),
+ /// Spanmap for a real file
+ RealSpanMap(&'a RealSpanMap),
+}
+
+impl mbe::SpanMapper<SpanData> for SpanMap {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+impl mbe::SpanMapper<SpanData> for RealSpanMap {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+
+impl SpanMap {
+ pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ match self {
+ Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
+ Self::RealSpanMap(span_map) => span_map.span_for_range(range),
+ }
+ }
+
+ pub fn as_ref(&self) -> SpanMapRef<'_> {
+ match self {
+ Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
+ Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
+ }
+ }
+}
+
+impl SpanMapRef<'_> {
+ pub fn span_for_range(self, range: TextRange) -> SpanData {
+ match self {
+ Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
+ Self::RealSpanMap(span_map) => span_map.span_for_range(range),
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Debug)]
+pub struct RealSpanMap {
+ file_id: FileId,
+ /// Invariant: Sorted vec over TextSize
+ // FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
+ pairs: Box<[(TextSize, ErasedFileAstId)]>,
+ end: TextSize,
+}
+
+impl RealSpanMap {
+ /// Creates a real file span map that returns absolute ranges (i.e. ranges relative to the root ast id, which starts at offset 0).
+ pub fn absolute(file_id: FileId) -> Self {
+ RealSpanMap {
+ file_id,
+ pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
+ end: TextSize::new(!0),
+ }
+ }
+
+ pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
+ let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
+ let ast_id_map = db.ast_id_map(file_id.into());
+ let tree = db.parse(file_id).tree();
+ pairs
+ .extend(tree.items().map(|item| {
+ (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
+ }));
+ RealSpanMap {
+ file_id,
+ pairs: pairs.into_boxed_slice(),
+ end: tree.syntax().text_range().end(),
+ }
+ }
+
+ pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ assert!(
+ range.end() <= self.end,
+ "range {range:?} goes beyond the end of the file {:?}",
+ self.end
+ );
+ let start = range.start();
+ let idx = self
+ .pairs
+ .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
+ .unwrap_err();
+ let (offset, ast_id) = self.pairs[idx - 1];
+ SpanData {
+ range: range - offset,
+ anchor: SpanAnchor { file_id: self.file_id, ast_id },
+ ctx: SyntaxContextId::ROOT,
+ }
+ }
+}
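
A runnable sketch of the partition-point trick in `span_for_range` above, with `&str` anchors instead of `ErasedFileAstId` (illustrative data only): because the comparator never returns `Equal`, `binary_search_by` always lands in `unwrap_err`, yielding the index of the first pair that starts after `start`; the pair before it is the enclosing anchor.

fn main() {
    // Sorted (start offset, anchor) pairs, as built by `from_file`.
    let pairs: &[(u32, &str)] = &[(0, "root"), (10, "fn a"), (50, "fn b")];
    let start = 55u32;
    let idx = pairs
        .binary_search_by(|&(offset, _)| offset.cmp(&start).then(std::cmp::Ordering::Less))
        .unwrap_err();
    let (offset, anchor) = pairs[idx - 1];
    assert_eq!((offset, anchor), (50, "fn b"));
    // The span for a range starting at 55 is then stored relative to `fn b`,
    // i.e. with its range shifted down by 50.
}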