Diffstat (limited to 'src/tools/rust-analyzer/crates/mbe')
 src/tools/rust-analyzer/crates/mbe/Cargo.toml                  |    2
 src/tools/rust-analyzer/crates/mbe/src/benchmark.rs            |   62
 src/tools/rust-analyzer/crates/mbe/src/expander.rs             |   48
 src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs     |  175
 src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs |  338
 src/tools/rust-analyzer/crates/mbe/src/lib.rs                  |  159
 src/tools/rust-analyzer/crates/mbe/src/parser.rs               |   68
 src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs        | 1022
 src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs  |   15
 src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs      |    4
 src/tools/rust-analyzer/crates/mbe/src/token_map.rs            |  156
 src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs              |   48
 12 files changed, 968 insertions(+), 1129 deletions(-)
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
index 82105522e..adab1003d 100644
--- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
cov-mark = "2.0.0-pre.1"
rustc-hash = "1.1.0"
smallvec.workspace = true
-tracing = "0.1.35"
+tracing.workspace = true
# local deps
syntax.workspace = true
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 9d43e1304..f503aecce 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
- syntax_node_to_token_tree, tt, DeclarativeMacro,
+ syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
};
#[test]
@@ -38,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
- let res = rules[&id].expand(tt);
+ let res = rules[&id].expand(&tt, |_| ());
assert!(res.err.is_none());
res.value.token_trees.len()
})
@@ -47,14 +47,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@@ -64,14 +64,17 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
.filter_map(ast::MacroRules::cast)
.map(|rule| {
let id = rule.name().unwrap().to_string();
- let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
+ let def_tt =
+ syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
(id, def_tt)
})
.collect()
}
/// Generate random invocation fixtures from rules
-fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
+fn invocation_fixtures(
+ rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
loop {
let mut subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
+ open: DUMMY,
+ close: DUMMY,
kind: tt::DelimiterKind::Invisible,
},
token_trees: vec![],
@@ -102,7 +105,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
- if it.expand(subtree.clone()).err.is_none() {
+ if it.expand(&subtree, |_| ()).err.is_none() {
res.push((name.clone(), subtree));
break;
}
@@ -116,7 +119,11 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
}
return res;
- fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+ fn collect_from_op(
+ op: &Op<DummyTestSpanData>,
+ parent: &mut tt::Subtree<DummyTestSpanData>,
+ seed: &mut usize,
+ ) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -202,38 +209,21 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
- fn make_ident(ident: &str) -> tt::TokenTree {
- tt::Leaf::Ident(tt::Ident {
- span: tt::TokenId::unspecified(),
- text: SmolStr::new(ident),
- })
- .into()
+ fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
}
- fn make_punct(char: char) -> tt::TokenTree {
- tt::Leaf::Punct(tt::Punct {
- span: tt::TokenId::unspecified(),
- char,
- spacing: tt::Spacing::Alone,
- })
- .into()
+ fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
}
- fn make_literal(lit: &str) -> tt::TokenTree {
- tt::Leaf::Literal(tt::Literal {
- span: tt::TokenId::unspecified(),
- text: SmolStr::new(lit),
- })
- .into()
+ fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
}
fn make_subtree(
kind: tt::DelimiterKind,
- token_trees: Option<Vec<tt::TokenTree>>,
- ) -> tt::TokenTree {
+ token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+ ) -> tt::TokenTree<DummyTestSpanData> {
tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
- kind,
- },
+ delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
token_trees: token_trees.unwrap_or_default(),
}
.into()
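
The benchmark changes above show the shape of the new API: token trees are generic over a span type, conversion takes a span map, and expansion takes a span-marker callback. A minimal sketch of that flow using the dummy span utilities this patch introduces; `expand_once` and its arguments are hypothetical test scaffolding:

    use mbe::{syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap};
    use syntax::SyntaxNode;

    // Hypothetical helper: `def_node` is the `macro_rules!` definition's
    // token-tree syntax node, `call` an already-converted invocation.
    fn expand_once(
        def_node: &SyntaxNode,
        call: &tt::Subtree<DummyTestSpanData>,
    ) -> tt::Subtree<DummyTestSpanData> {
        // Spans come from the span map instead of a TokenId allocator.
        let def = syntax_node_to_token_tree(def_node, DummyTestSpanMap);
        let mac = DeclarativeMacro::parse_macro_rules(&def, true);
        // Tests don't care about hygiene, so the span marker is a no-op.
        let res = mac.expand(call, |_| ());
        assert!(res.err.is_none());
        res.value
    }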
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index f2d89d3ef..0e755f69b 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -7,15 +7,17 @@ mod transcriber;
use rustc_hash::FxHashMap;
use syntax::SmolStr;
+use tt::Span;
-use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
+use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
-pub(crate) fn expand_rules(
- rules: &[crate::Rule],
- input: &tt::Subtree,
+pub(crate) fn expand_rules<S: Span>(
+ rules: &[crate::Rule<S>],
+ input: &tt::Subtree<S>,
+ marker: impl Fn(&mut S) + Copy,
is_2021: bool,
-) -> ExpandResult<tt::Subtree> {
- let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
+) -> ExpandResult<tt::Subtree<S>> {
+ let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
for rule in rules {
let new_match = matcher::match_(&rule.lhs, input, is_2021);
@@ -24,7 +26,7 @@ pub(crate) fn expand_rules(
// Unconditionally returning the transcription here makes the
// `test_repeat_bad_var` test fail.
let ExpandResult { value, err: transcribe_err } =
- transcriber::transcribe(&rule.rhs, &new_match.bindings);
+ transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
if transcribe_err.is_none() {
return ExpandResult::ok(value);
}
@@ -43,11 +45,11 @@ pub(crate) fn expand_rules(
if let Some((match_, rule)) = match_ {
// if we got here, there was no match without errors
let ExpandResult { value, err: transcribe_err } =
- transcriber::transcribe(&rule.rhs, &match_.bindings);
+ transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
ExpandResult { value, err: match_.err.or(transcribe_err) }
} else {
ExpandResult::new(
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
+ tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
ExpandError::NoMatchingRule,
)
}
@@ -98,31 +100,37 @@ pub(crate) fn expand_rules(
/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
/// many is not a plain `usize`, but a `&[usize]`.
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
-struct Bindings {
- inner: FxHashMap<SmolStr, Binding>,
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct Bindings<S> {
+ inner: FxHashMap<SmolStr, Binding<S>>,
+}
+
+impl<S> Default for Bindings<S> {
+ fn default() -> Self {
+ Self { inner: Default::default() }
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
-enum Binding {
- Fragment(Fragment),
- Nested(Vec<Binding>),
+enum Binding<S> {
+ Fragment(Fragment<S>),
+ Nested(Vec<Binding<S>>),
Empty,
Missing(MetaVarKind),
}
#[derive(Debug, Clone, PartialEq, Eq)]
-enum Fragment {
+enum Fragment<S> {
/// token fragments are just copy-pasted into the output
- Tokens(tt::TokenTree),
+ Tokens(tt::TokenTree<S>),
/// Expr ast fragments are surrounded with `()` on insertion to preserve
/// precedence. Note that this impl is different from the one currently in
/// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
///
- /// At one point in time, we tried to to use "fake" delimiters here a-la
+ /// At one point in time, we tried to use "fake" delimiters here à la
/// proc-macro delimiter=none. As we later discovered, "none" delimiters are
/// tricky to handle in the parser, and rustc doesn't handle those either.
- Expr(tt::TokenTree),
+ Expr(tt::Subtree<S>),
/// There are roughly two types of paths: paths in expression context, where a
/// separator `::` between an identifier and its following generic argument list
/// is mandatory, and paths in type context, where `::` can be omitted.
@@ -132,5 +140,5 @@ enum Fragment {
/// and is transcribed as an expression-context path, verbatim transcription
/// would cause a syntax error. We need to fix it up just before transcribing;
/// see `transcriber::fix_up_and_push_path_tt()`.
- Path(tt::TokenTree),
+ Path(tt::Subtree<S>),
}
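
The `Fragment::Expr` doc above is about operator precedence: matched `expr` fragments travel as token trees, so the transcriber parenthesizes them on insertion (now by rewriting an invisible delimiter to `Parenthesis`) to keep the surrounding template from re-associating them. Observable with any Rust toolchain:

    macro_rules! double {
        ($e:expr) => { $e * 2 };
    }

    fn main() {
        // The matched expression is effectively wrapped in `()` when it is
        // transcribed, so this is (1 + 1) * 2 == 4, not 1 + 1 * 2 == 3.
        assert_eq!(double!(1 + 1), 4);
    }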
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 1471af98b..012b02a3f 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -63,21 +63,21 @@ use std::rc::Rc;
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
+use tt::Span;
use crate::{
expander::{Binding, Bindings, ExpandResult, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
- tt,
tt_iter::TtIter,
ExpandError, MetaTemplate, ValueResult,
};
-impl Bindings {
+impl<S: Span> Bindings<S> {
fn push_optional(&mut self, name: &SmolStr) {
// FIXME: Do we have a better way to represent an empty token ?
// Insert an empty subtree for empty token
let tt =
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into();
+ tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
}
@@ -85,14 +85,14 @@ impl Bindings {
self.inner.insert(name.clone(), Binding::Empty);
}
- fn bindings(&self) -> impl Iterator<Item = &Binding> {
+ fn bindings(&self) -> impl Iterator<Item = &Binding<S>> {
self.inner.values()
}
}
-#[derive(Clone, Debug, Default, PartialEq, Eq)]
-pub(super) struct Match {
- pub(super) bindings: Bindings,
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct Match<S> {
+ pub(super) bindings: Bindings<S>,
/// We currently just keep the first error and count the rest to compare matches.
pub(super) err: Option<ExpandError>,
pub(super) err_count: usize,
@@ -102,7 +102,19 @@ pub(super) struct Match {
pub(super) bound_count: usize,
}
-impl Match {
+impl<S> Default for Match<S> {
+ fn default() -> Self {
+ Self {
+ bindings: Default::default(),
+ err: Default::default(),
+ err_count: Default::default(),
+ unmatched_tts: Default::default(),
+ bound_count: Default::default(),
+ }
+ }
+}
+
+impl<S> Match<S> {
fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take();
self.err = prev_err.or(Some(err));
@@ -111,12 +123,16 @@ impl Match {
}
/// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match {
+pub(super) fn match_<S: Span>(
+ pattern: &MetaTemplate<S>,
+ input: &tt::Subtree<S>,
+ is_2021: bool,
+) -> Match<S> {
let mut res = match_loop(pattern, input, is_2021);
res.bound_count = count(res.bindings.bindings());
return res;
- fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+ fn count<'a, S: 'a>(bindings: impl Iterator<Item = &'a Binding<S>>) -> usize {
bindings
.map(|it| match it {
Binding::Fragment(_) => 1,
@@ -129,10 +145,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool)
}
#[derive(Debug, Clone)]
-enum BindingKind {
+enum BindingKind<S> {
Empty(SmolStr),
Optional(SmolStr),
- Fragment(SmolStr, Fragment),
+ Fragment(SmolStr, Fragment<S>),
Missing(SmolStr, MetaVarKind),
Nested(usize, usize),
}
@@ -146,13 +162,18 @@ enum LinkNode<T> {
Parent { idx: usize, len: usize },
}
-#[derive(Default)]
-struct BindingsBuilder {
- nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+struct BindingsBuilder<S> {
+ nodes: Vec<Vec<LinkNode<Rc<BindingKind<S>>>>>,
nested: Vec<Vec<LinkNode<usize>>>,
}
-impl BindingsBuilder {
+impl<S> Default for BindingsBuilder<S> {
+ fn default() -> Self {
+ Self { nodes: Default::default(), nested: Default::default() }
+ }
+}
+
+impl<S: Span> BindingsBuilder<S> {
fn alloc(&mut self) -> BindingsIdx {
let idx = self.nodes.len();
self.nodes.push(Vec::new());
@@ -189,7 +210,7 @@ impl BindingsBuilder {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
}
- fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
+ fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment<S>) {
self.nodes[idx.0]
.push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
}
@@ -210,11 +231,11 @@ impl BindingsBuilder {
idx.0 = new_idx;
}
- fn build(self, idx: &BindingsIdx) -> Bindings {
+ fn build(self, idx: &BindingsIdx) -> Bindings<S> {
self.build_inner(&self.nodes[idx.0])
}
- fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind>>]) -> Bindings {
+ fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<S>>>]) -> Bindings<S> {
let mut bindings = Bindings::default();
let mut nodes = Vec::new();
self.collect_nodes(link_nodes, &mut nodes);
@@ -264,7 +285,7 @@ impl BindingsBuilder {
&'a self,
id: usize,
len: usize,
- nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind>>]>,
+ nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind<S>>>]>,
) {
self.nested[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
@@ -272,7 +293,7 @@ impl BindingsBuilder {
});
}
- fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+ fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<S>>) {
let last = &self.nodes[idx];
let mut nested_refs: Vec<&[_]> = Vec::new();
self.nested[nested_idx].iter().for_each(|it| match *it {
@@ -283,7 +304,7 @@ impl BindingsBuilder {
nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
}
- fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) {
+ fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind<S>>) {
self.nodes[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
@@ -292,8 +313,8 @@ impl BindingsBuilder {
fn collect_nodes<'a>(
&'a self,
- link_nodes: &'a [LinkNode<Rc<BindingKind>>],
- nodes: &mut Vec<&'a BindingKind>,
+ link_nodes: &'a [LinkNode<Rc<BindingKind<S>>>],
+ nodes: &mut Vec<&'a BindingKind<S>>,
) {
link_nodes.iter().for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
@@ -303,22 +324,22 @@ impl BindingsBuilder {
}
#[derive(Debug, Clone)]
-struct MatchState<'t> {
+struct MatchState<'t, S> {
/// The position of the "dot" in this matcher
- dot: OpDelimitedIter<'t>,
+ dot: OpDelimitedIter<'t, S>,
/// Token subtree stack
/// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
/// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
/// that where the bottom of the stack is the outermost matcher.
- stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
+ stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>,
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
/// before we enter the repetition.
- up: Option<Box<MatchState<'t>>>,
+ up: Option<Box<MatchState<'t, S>>>,
/// The separator if we are in a repetition.
- sep: Option<Separator>,
+ sep: Option<Separator<S>>,
/// The KleeneOp of this sequence if we are in a repetition.
sep_kind: Option<RepeatKind>,
@@ -330,7 +351,7 @@ struct MatchState<'t> {
bindings: BindingsIdx,
/// Cached result of meta variable parsing
- meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
+ meta_result: Option<(TtIter<'t, S>, ExpandResult<Option<Fragment<S>>>)>,
/// Whether an error occurred in this state; if so, it is "poisoned", i.e. propagated to the parent state
is_error: bool,
@@ -355,16 +376,16 @@ struct MatchState<'t> {
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `error_items`: the set of items in errors, used for error-resilient parsing
#[inline]
-fn match_loop_inner<'t>(
- src: TtIter<'t>,
- stack: &[TtIter<'t>],
- res: &mut Match,
- bindings_builder: &mut BindingsBuilder,
- cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
- bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
- next_items: &mut Vec<MatchState<'t>>,
- eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
- error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+fn match_loop_inner<'t, S: Span>(
+ src: TtIter<'t, S>,
+ stack: &[TtIter<'t, S>],
+ res: &mut Match<S>,
+ bindings_builder: &mut BindingsBuilder<S>,
+ cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ next_items: &mut Vec<MatchState<'t, S>>,
+ eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
is_2021: bool,
) {
macro_rules! try_push {
@@ -468,7 +489,7 @@ fn match_loop_inner<'t>(
if let Ok(subtree) = src.clone().expect_subtree() {
if subtree.delimiter.kind == delimiter.kind {
item.stack.push(item.dot);
- item.dot = tokens.iter_delimited(Some(delimiter));
+ item.dot = tokens.iter_delimited(Some(*delimiter));
cur_items.push(item);
}
}
@@ -587,9 +608,9 @@ fn match_loop_inner<'t>(
}
}
-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match {
+fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
let mut src = TtIter::new(src);
- let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
+ let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_recover_item = None;
@@ -736,16 +757,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match
}
}
-fn match_meta_var(
+fn match_meta_var<S: Span>(
kind: MetaVarKind,
- input: &mut TtIter<'_>,
+ input: &mut TtIter<'_, S>,
is_2021: bool,
-) -> ExpandResult<Option<Fragment>> {
+) -> ExpandResult<Option<Fragment<S>>> {
let fragment = match kind {
MetaVarKind::Path => {
return input
.expect_fragment(parser::PrefixEntryPoint::Path)
- .map(|it| it.map(Fragment::Path));
+ .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
}
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -771,9 +792,21 @@ fn match_meta_var(
}
_ => {}
};
- return input
- .expect_fragment(parser::PrefixEntryPoint::Expr)
- .map(|tt| tt.map(Fragment::Expr));
+ return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
+ tt.map(|tt| match tt {
+ tt::TokenTree::Leaf(leaf) => tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![leaf.into()],
+ },
+ tt::TokenTree::Subtree(mut s) => {
+ if s.delimiter.kind == tt::DelimiterKind::Invisible {
+ s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+ }
+ s
+ }
+ })
+ .map(Fragment::Expr)
+ });
}
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
let tt_result = match kind {
@@ -796,7 +829,7 @@ fn match_meta_var(
match neg {
None => lit.into(),
Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![neg, lit.into()],
}),
}
@@ -811,7 +844,7 @@ fn match_meta_var(
input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
}
-fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
+fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate<S>) {
for op in pattern.iter() {
match op {
Op::Var { name, .. } => collector_fun(name.clone()),
@@ -824,38 +857,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
}
}
}
-impl MetaTemplate {
- fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
+impl<S: Span> MetaTemplate<S> {
+ fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
OpDelimitedIter {
inner: &self.0,
idx: 0,
- delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED),
+ delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
}
}
}
#[derive(Debug, Clone, Copy)]
-enum OpDelimited<'a> {
- Op(&'a Op),
+enum OpDelimited<'a, S> {
+ Op(&'a Op<S>),
Open,
Close,
}
#[derive(Debug, Clone, Copy)]
-struct OpDelimitedIter<'a> {
- inner: &'a [Op],
- delimited: &'a tt::Delimiter,
+struct OpDelimitedIter<'a, S> {
+ inner: &'a [Op<S>],
+ delimited: tt::Delimiter<S>,
idx: usize,
}
-impl<'a> OpDelimitedIter<'a> {
+impl<'a, S: Span> OpDelimitedIter<'a, S> {
fn is_eof(&self) -> bool {
let len = self.inner.len()
+ if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
self.idx >= len
}
- fn peek(&self) -> Option<OpDelimited<'a>> {
+ fn peek(&self) -> Option<OpDelimited<'a, S>> {
match self.delimited.kind {
tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op),
_ => match self.idx {
@@ -871,8 +904,8 @@ impl<'a> OpDelimitedIter<'a> {
}
}
-impl<'a> Iterator for OpDelimitedIter<'a> {
- type Item = OpDelimited<'a>;
+impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> {
+ type Item = OpDelimited<'a, S>;
fn next(&mut self) -> Option<Self::Item> {
let res = self.peek();
@@ -888,8 +921,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
-impl TtIter<'_> {
- fn expect_separator(&mut self, separator: &Separator) -> bool {
+impl<S: Span> TtIter<'_, S> {
+ fn expect_separator(&mut self, separator: &Separator<S>) -> bool {
let mut fork = self.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@@ -919,7 +952,7 @@ impl TtIter<'_> {
ok
}
- fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+ fn expect_tt(&mut self) -> Result<tt::TokenTree<S>, ()> {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
if punct.char == '\'' {
self.expect_lifetime()
@@ -927,7 +960,7 @@ impl TtIter<'_> {
let puncts = self.expect_glued_punct()?;
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
Ok(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees,
}))
}
@@ -936,7 +969,7 @@ impl TtIter<'_> {
}
}
- fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+ fn expect_lifetime(&mut self) -> Result<tt::TokenTree<S>, ()> {
let punct = self.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
@@ -944,7 +977,7 @@ impl TtIter<'_> {
let ident = self.expect_ident_or_underscore()?;
Ok(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
@@ -953,7 +986,7 @@ impl TtIter<'_> {
.into())
}
- fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
+ fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<S>> {
let mut fork = self.clone();
match fork.expect_char(c) {
Ok(_) => {
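
The `MatchState` docs above describe a stack of `OpDelimitedIter`s for nested delimited submatchers, plus separator handling. In `macro_rules!` terms, a pattern exercising both, and the `Binding::Nested` layout it produces:

    macro_rules! pairs {
        // Matching descends into each `(...)` group (pushing the outer
        // matcher position onto the stack) and checks the `,` separator
        // between groups.
        ($(($k:ident: $v:expr)),*) => {
            [$((stringify!($k), $v)),*]
        };
    }

    fn main() {
        // `k` and `v` are each bound as a Binding::Nested holding one
        // fragment per repetition.
        assert_eq!(pairs!((a: 1), (b: 2)), [("a", 1), ("b", 2)]);
    }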
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index cdac2f1e3..7a3e8653c 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -2,31 +2,29 @@
//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
use syntax::SmolStr;
+use tt::{Delimiter, Span};
use crate::{
expander::{Binding, Bindings, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
- tt::{self, Delimiter},
CountError, ExpandError, ExpandResult, MetaTemplate,
};
-impl Bindings {
- fn contains(&self, name: &str) -> bool {
- self.inner.contains_key(name)
- }
-
- fn get(&self, name: &str) -> Result<&Binding, ExpandError> {
+impl<S: Span> Bindings<S> {
+ fn get(&self, name: &str) -> Result<&Binding<S>, ExpandError> {
match self.inner.get(name) {
Some(binding) => Ok(binding),
- None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))),
+ None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))),
}
}
fn get_fragment(
&self,
name: &str,
+ mut span: S,
nesting: &mut [NestingState],
- ) -> Result<Fragment, ExpandError> {
+ marker: impl Fn(&mut S),
+ ) -> Result<Fragment<S>, ExpandError> {
macro_rules! binding_err {
($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
}
@@ -48,54 +46,75 @@ impl Bindings {
};
}
match b {
- Binding::Fragment(it) => Ok(it.clone()),
- // emit some reasonable default expansion for missing bindings,
- // this gives better recovery than emitting the `$fragment-name` verbatim
- Binding::Missing(it) => Ok(match it {
- MetaVarKind::Stmt => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
- span: tt::TokenId::unspecified(),
- char: ';',
- spacing: tt::Spacing::Alone,
- })))
- }
- MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => {
+ let tt::Subtree { delimiter, token_trees } = sub;
+ marker(&mut span);
+ let subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
- kind: tt::DelimiterKind::Brace,
+ // FIXME split span
+ open: span,
+ close: span,
+ kind: delimiter.kind,
},
- token_trees: vec![],
- })),
- // FIXME: Meta and Item should get proper defaults
- MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
- Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: token_trees.clone(),
+ };
+ Ok(match f {
+ Fragment::Tokens(_) => unreachable!(),
+ Fragment::Expr(_) => Fragment::Expr,
+ Fragment::Path(_) => Fragment::Path,
+ }(subtree))
+ }
+ Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()),
+ // emit some reasonable default expansion for missing bindings,
+ // this gives better recovery than emitting the `$fragment-name` verbatim
+ Binding::Missing(it) => Ok({
+ marker(&mut span);
+ match it {
+ MetaVarKind::Stmt => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ span,
+ char: ';',
+ spacing: tt::Spacing::Alone,
+ })))
+ }
+ MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: span,
+ close: span,
+ kind: tt::DelimiterKind::Brace,
+ },
token_trees: vec![],
- }))
- }
- MetaVarKind::Path
- | MetaVarKind::Ty
- | MetaVarKind::Pat
- | MetaVarKind::PatParam
- | MetaVarKind::Expr
- | MetaVarKind::Ident => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("missing"),
- span: tt::TokenId::unspecified(),
- })))
- }
- MetaVarKind::Lifetime => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("'missing"),
- span: tt::TokenId::unspecified(),
- })))
- }
- MetaVarKind::Literal => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("\"missing\""),
- span: tt::TokenId::unspecified(),
- })))
+ })),
+ // FIXME: Meta and Item should get proper defaults
+ MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
+ Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: vec![],
+ }))
+ }
+ MetaVarKind::Path
+ | MetaVarKind::Ty
+ | MetaVarKind::Pat
+ | MetaVarKind::PatParam
+ | MetaVarKind::Expr
+ | MetaVarKind::Ident => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("missing"),
+ span,
+ })))
+ }
+ MetaVarKind::Lifetime => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("'missing"),
+ span,
+ })))
+ }
+ MetaVarKind::Literal => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("\"missing\""),
+ span,
+ })))
+ }
}
}),
Binding::Nested(_) => {
@@ -108,13 +127,14 @@ impl Bindings {
}
}
-pub(super) fn transcribe(
- template: &MetaTemplate,
- bindings: &Bindings,
-) -> ExpandResult<tt::Subtree> {
+pub(super) fn transcribe<S: Span>(
+ template: &MetaTemplate<S>,
+ bindings: &Bindings<S>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<tt::Subtree<S>> {
let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
- let mut arena: Vec<tt::TokenTree> = Vec::new();
- expand_subtree(&mut ctx, template, None, &mut arena)
+ let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
+ expand_subtree(&mut ctx, template, None, &mut arena, marker)
}
#[derive(Debug)]
@@ -129,50 +149,75 @@ struct NestingState {
}
#[derive(Debug)]
-struct ExpandCtx<'a> {
- bindings: &'a Bindings,
+struct ExpandCtx<'a, S> {
+ bindings: &'a Bindings<S>,
nesting: Vec<NestingState>,
}
-fn expand_subtree(
- ctx: &mut ExpandCtx<'_>,
- template: &MetaTemplate,
- delimiter: Option<Delimiter>,
- arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<tt::Subtree> {
+fn expand_subtree<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ template: &MetaTemplate<S>,
+ delimiter: Option<Delimiter<S>>,
+ arena: &mut Vec<tt::TokenTree<S>>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<tt::Subtree<S>> {
// remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
let start_elements = arena.len();
let mut err = None;
'ops: for op in template.iter() {
match op {
- Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()),
- Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()),
+ Op::Literal(it) => arena.push(
+ tt::Leaf::from({
+ let mut it = it.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ ),
+ Op::Ident(it) => arena.push(
+ tt::Leaf::from({
+ let mut it = it.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ ),
Op::Punct(puncts) => {
for punct in puncts {
- arena.push(tt::Leaf::from(*punct).into());
+ arena.push(
+ tt::Leaf::from({
+ let mut it = punct.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ );
}
}
Op::Subtree { tokens, delimiter } => {
+ let mut delimiter = *delimiter;
+ marker(&mut delimiter.open);
+ marker(&mut delimiter.close);
let ExpandResult { value: tt, err: e } =
- expand_subtree(ctx, tokens, Some(*delimiter), arena);
+ expand_subtree(ctx, tokens, Some(delimiter), arena, marker);
err = err.or(e);
arena.push(tt.into());
}
Op::Var { name, id, .. } => {
- let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
+ let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
err = err.or(e);
push_fragment(arena, fragment);
}
Op::Repeat { tokens: subtree, kind, separator } => {
let ExpandResult { value: fragment, err: e } =
- expand_repeat(ctx, subtree, *kind, separator, arena);
+ expand_repeat(ctx, subtree, *kind, separator, arena, marker);
err = err.or(e);
push_fragment(arena, fragment)
}
Op::Ignore { name, id } => {
// Expand the variable, but ignore the result. This registers the repetition count.
// FIXME: Any emitted errors are dropped.
- expand_var(ctx, name, *id);
+ expand_var(ctx, name, *id, marker);
}
Op::Index { depth } => {
let index =
@@ -180,7 +225,8 @@ fn expand_subtree(
arena.push(
tt::Leaf::Literal(tt::Literal {
text: index.to_string().into(),
- span: tt::TokenId::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -239,7 +285,8 @@ fn expand_subtree(
arena.push(
tt::Leaf::Literal(tt::Literal {
text: c.to_string().into(),
- span: tt::TokenId::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -250,60 +297,70 @@ fn expand_subtree(
let tts = arena.drain(start_elements..).collect();
ExpandResult {
value: tt::Subtree {
- delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified),
+ delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible),
token_trees: tts,
},
err,
}
}
-fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
+fn expand_var<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ v: &SmolStr,
+ id: S,
+ marker: impl Fn(&mut S),
+) -> ExpandResult<Fragment<S>> {
// We already handle $crate case in mbe parser
debug_assert!(v != "crate");
- if !ctx.bindings.contains(v) {
- // Note that it is possible to have a `$var` inside a macro which is not bound.
- // For example:
- // ```
- // macro_rules! foo {
- // ($a:ident, $b:ident, $c:tt) => {
- // macro_rules! bar {
- // ($bi:ident) => {
- // fn $bi() -> u8 {$c}
- // }
- // }
- // }
- // ```
- // We just treat it as normal tokens
- let tt = tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![
- tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
- .into(),
- tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
- ],
+ match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
+ Ok(it) => ExpandResult::ok(it),
+ Err(ExpandError::UnresolvedBinding(_)) => {
+ // Note that it is possible to have a `$var` inside a macro which is not bound.
+ // For example:
+ // ```
+ // macro_rules! foo {
+ // ($a:ident, $b:ident, $c:tt) => {
+ // macro_rules! bar {
+ // ($bi:ident) => {
+ // fn $bi() -> u8 {$c}
+ // }
+ // }
+ // }
+ // ```
+ // We just treat it as normal tokens
+ let tt = tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: vec![
+ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
+ .into(),
+ tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
+ ],
+ }
+ .into();
+ ExpandResult::ok(Fragment::Tokens(tt))
}
- .into();
- ExpandResult::ok(Fragment::Tokens(tt))
- } else {
- ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else(
- |e| ExpandResult {
- value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
- err: Some(e),
- },
- ExpandResult::ok,
- )
+ Err(e) => ExpandResult {
+ value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
+ // FIXME
+ open: S::DUMMY,
+ // FIXME
+ close: S::DUMMY,
+ }))),
+ err: Some(e),
+ },
}
}
-fn expand_repeat(
- ctx: &mut ExpandCtx<'_>,
- template: &MetaTemplate,
+fn expand_repeat<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ template: &MetaTemplate<S>,
kind: RepeatKind,
- separator: &Option<Separator>,
- arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<Fragment> {
- let mut buf: Vec<tt::TokenTree> = Vec::new();
+ separator: &Option<Separator<S>>,
+ arena: &mut Vec<tt::TokenTree<S>>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<Fragment<S>> {
+ let mut buf: Vec<tt::TokenTree<S>> = Vec::new();
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a proper macro-by-example implementation
@@ -313,7 +370,8 @@ fn expand_repeat(
let mut err = None;
loop {
- let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+ let ExpandResult { value: mut t, err: e } =
+ expand_subtree(ctx, template, None, arena, marker);
let nesting_state = ctx.nesting.last_mut().unwrap();
if nesting_state.at_end || !nesting_state.hit {
break;
@@ -330,8 +388,11 @@ fn expand_repeat(
);
return ExpandResult {
value: Fragment::Tokens(
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }
- .into(),
+ tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![],
+ }
+ .into(),
),
err: Some(ExpandError::LimitExceeded),
};
@@ -342,7 +403,7 @@ fn expand_repeat(
continue;
}
- t.delimiter = tt::Delimiter::unspecified();
+ t.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
push_subtree(&mut buf, t);
if let Some(sep) = separator {
@@ -376,7 +437,7 @@ fn expand_repeat(
// Check if it is a single token subtree without any delimiter
// e.g. <Delimiter:None> ['>'] </Delimiter:None>
- let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into();
+ let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into();
if RepeatKind::OneOrMore == kind && counter == 0 {
return ExpandResult {
@@ -387,25 +448,18 @@ fn expand_repeat(
ExpandResult { value: Fragment::Tokens(tt), err }
}
-fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
match fragment {
Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
- Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
- if tt.delimiter.kind == tt::DelimiterKind::Invisible {
- tt.delimiter = tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- kind: tt::DelimiterKind::Parenthesis,
- };
- }
- buf.push(tt.into())
+ Fragment::Expr(sub) => {
+ push_subtree(buf, sub);
}
- Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
- Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt),
+ Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
+ Fragment::Tokens(tt) => buf.push(tt),
}
}
-fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
+fn push_subtree<S>(buf: &mut Vec<tt::TokenTree<S>>, tt: tt::Subtree<S>) {
match tt.delimiter.kind {
tt::DelimiterKind::Invisible => buf.extend(tt.token_trees),
_ => buf.push(tt.into()),
@@ -415,7 +469,7 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
/// Inserts the path separator `::` between an identifier and its following generic
/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
/// we need this fixup.
-fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
+fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt::Subtree<S>) {
stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
let mut prev_was_ident = false;
// Note that we only need to fix up the top-level `TokenTree`s because the
@@ -432,7 +486,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
tt::Leaf::Punct(tt::Punct {
char: ':',
spacing: tt::Spacing::Joint,
- span: tt::Span::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -440,7 +495,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
tt::Leaf::Punct(tt::Punct {
char: ':',
spacing: tt::Spacing::Alone,
- span: tt::Span::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -453,9 +509,9 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
/// defined by the metavar expression.
-fn count(
- ctx: &ExpandCtx<'_>,
- binding: &Binding,
+fn count<S>(
+ ctx: &ExpandCtx<'_, S>,
+ binding: &Binding<S>,
our_depth: usize,
count_depth: Option<usize>,
) -> Result<usize, CountError> {
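
`fix_up_and_push_path_tt` exists because a path matched in type context may omit `::` before its generic arguments, which is illegal when the template reuses the path in expression context. A small example of the case being fixed up:

    macro_rules! make {
        ($p:path) => { $p::new() };
    }

    fn main() {
        // `Vec<u8>` is a valid type-context path. Transcribed verbatim it
        // would yield `Vec<u8>::new()`, a syntax error in expression
        // position; the fixup inserts the separator: `Vec::<u8>::new()`.
        let v = make!(Vec<u8>);
        assert!(v.is_empty());
    }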
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 9d886a1c9..933179858 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -3,10 +3,10 @@
//! interface, although it contains some code to bridge `SyntaxNode`s and
//! `TokenTree`s as well!
//!
-//! The tes for this functionality live in another crate:
+//! The tests for this functionality live in another crate:
//! `hir_def::macro_expansion_tests::mbe`.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod parser;
mod expander;
@@ -18,8 +18,8 @@ mod to_parser_input;
mod benchmark;
mod token_map;
-use ::tt::token_id as tt;
use stdx::impl_from;
+use tt::Span;
use std::fmt;
@@ -28,19 +28,21 @@ use crate::{
tt_iter::TtIter,
};
-pub use self::tt::{Delimiter, DelimiterKind, Punct};
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use ::parser::TopEntryPoint;
+pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext};
pub use crate::{
syntax_bridge::{
- parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map,
- syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree,
- syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
- SyntheticTokenId,
+ parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span,
+ syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node,
+ SpanMapper,
},
- token_map::TokenMap,
+ token_map::SpanMap,
};
+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ParseError {
UnexpectedToken(Box<str>),
@@ -73,6 +75,7 @@ impl fmt::Display for ParseError {
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
BindingError(Box<Box<str>>),
+ UnresolvedBinding(Box<Box<str>>),
LeftoverTokens,
ConversionError,
LimitExceeded,
@@ -95,6 +98,10 @@ impl fmt::Display for ExpandError {
ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
ExpandError::BindingError(e) => f.write_str(e),
+ ExpandError::UnresolvedBinding(binding) => {
+ f.write_str("could not find binding ")?;
+ f.write_str(binding)
+ }
ExpandError::ConversionError => f.write_str("could not convert tokens"),
ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
@@ -124,10 +131,8 @@ impl fmt::Display for CountError {
/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct DeclarativeMacro {
- rules: Box<[Rule]>,
- /// Highest id of the token we have in TokenMap
- shift: Shift,
+pub struct DeclarativeMacro<S> {
+ rules: Box<[Rule<S>]>,
// This is used for correctly determining the behavior of the pat fragment
// FIXME: This should be tracked by hygiene of the fragment identifier!
is_2021: bool,
@@ -135,96 +140,18 @@ pub struct DeclarativeMacro {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-struct Rule {
- lhs: MetaTemplate,
- rhs: MetaTemplate,
+struct Rule<S> {
+ lhs: MetaTemplate<S>,
+ rhs: MetaTemplate<S>,
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Shift(u32);
-
-impl Shift {
- pub fn new(tt: &tt::Subtree) -> Shift {
- // Note that TokenId is started from zero,
- // We have to add 1 to prevent duplication.
- let value = max_id(tt).map_or(0, |it| it + 1);
- return Shift(value);
-
- // Find the max token id inside a subtree
- fn max_id(subtree: &tt::Subtree) -> Option<u32> {
- let filter =
- |tt: &_| match tt {
- tt::TokenTree::Subtree(subtree) => {
- let tree_id = max_id(subtree);
- if subtree.delimiter.open != tt::TokenId::unspecified() {
- Some(tree_id.map_or(subtree.delimiter.open.0, |t| {
- t.max(subtree.delimiter.open.0)
- }))
- } else {
- tree_id
- }
- }
- tt::TokenTree::Leaf(leaf) => {
- let &(tt::Leaf::Ident(tt::Ident { span, .. })
- | tt::Leaf::Punct(tt::Punct { span, .. })
- | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf;
-
- (span != tt::TokenId::unspecified()).then_some(span.0)
- }
- };
- subtree.token_trees.iter().filter_map(filter).max()
- }
- }
-
- /// Shift given TokenTree token id
- pub fn shift_all(self, tt: &mut tt::Subtree) {
- for t in &mut tt.token_trees {
- match t {
- tt::TokenTree::Leaf(
- tt::Leaf::Ident(tt::Ident { span, .. })
- | tt::Leaf::Punct(tt::Punct { span, .. })
- | tt::Leaf::Literal(tt::Literal { span, .. }),
- ) => *span = self.shift(*span),
- tt::TokenTree::Subtree(tt) => {
- tt.delimiter.open = self.shift(tt.delimiter.open);
- tt.delimiter.close = self.shift(tt.delimiter.close);
- self.shift_all(tt)
- }
- }
- }
- }
-
- pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
- if id == tt::TokenId::unspecified() {
- id
- } else {
- tt::TokenId(id.0 + self.0)
- }
- }
-
- pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
- id.0.checked_sub(self.0).map(tt::TokenId)
- }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Origin {
- Def,
- Call,
-}
-
-impl DeclarativeMacro {
- pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
- DeclarativeMacro {
- rules: Box::default(),
- shift: Shift(0),
- is_2021,
- err: Some(Box::new(err)),
- }
+impl<S: Span> DeclarativeMacro<S> {
+ pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro<S> {
+ DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) }
}
/// The old, `macro_rules! m {}` flavor.
- pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
+ pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
@@ -256,11 +183,11 @@ impl DeclarativeMacro {
}
}
- DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
+ DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
}
/// The new, unstable `macro m {}` flavor.
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
+ pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
let mut err = None;
@@ -307,36 +234,24 @@ impl DeclarativeMacro {
}
}
- DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
- }
-
- pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
- self.shift.shift_all(&mut tt);
- expander::expand_rules(&self.rules, &tt, self.is_2021)
+ DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
}
pub fn err(&self) -> Option<&ParseError> {
self.err.as_deref()
}
- pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- self.shift.shift(id)
- }
-
- pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
- match self.shift.unshift(id) {
- Some(id) => (id, Origin::Call),
- None => (id, Origin::Def),
- }
- }
-
- pub fn shift(&self) -> Shift {
- self.shift
+ pub fn expand(
+ &self,
+ tt: &tt::Subtree<S>,
+ marker: impl Fn(&mut S) + Copy,
+ ) -> ExpandResult<tt::Subtree<S>> {
+ expander::expand_rules(&self.rules, &tt, marker, self.is_2021)
}
}
-impl Rule {
- fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result<Self, ParseError> {
+impl<S: Span> Rule<S> {
+ fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result<Self, ParseError> {
let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
if expect_arrow {
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@@ -351,7 +266,7 @@ impl Rule {
}
}
-fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
+fn validate<S: Span>(pattern: &MetaTemplate<S>) -> Result<(), ParseError> {
for op in pattern.iter() {
match op {
Op::Subtree { tokens, .. } => validate(tokens)?,
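
With the `Shift` machinery gone (no more max-`TokenId` scan, `shift_all`, or `map_id_up`/`map_id_down`), distinguishing definition-site from call-site tokens becomes the `marker` callback's job: it is applied to every span the transcriber copies out of the macro definition. A sketch of a caller, where `MyAnchor`, `MyCtx`, and `mark_call_site` are hypothetical stand-ins for the real hygiene machinery:

    use mbe::DeclarativeMacro;
    use tt::{SpanData, Subtree};

    // Hypothetical anchor/context types; see the note above.
    type MySpan = SpanData<MyAnchor, MyCtx>;

    fn expand_marked(mac: &DeclarativeMacro<MySpan>, call: &Subtree<MySpan>) -> Subtree<MySpan> {
        // Tokens copied from the definition get a call-site syntax context
        // stamped onto them; tokens from `call` keep their spans. This
        // replaces the old `shift_all`/`unshift` bookkeeping. Errors are
        // ignored in this sketch.
        mac.expand(call, |span| span.ctx = mark_call_site(span.ctx)).value
    }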
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
index 7a143e746..00ba35377 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -3,8 +3,9 @@
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
+use tt::Span;
-use crate::{tt, tt_iter::TtIter, ParseError};
+use crate::{tt_iter::TtIter, ParseError};
/// Consider
///
@@ -20,22 +21,22 @@ use crate::{tt, tt_iter::TtIter, ParseError};
/// Stuff to the right is a [`MetaTemplate`] template which is used to produce
/// output.
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
+pub(crate) struct MetaTemplate<S>(pub(crate) Box<[Op<S>]>);
-impl MetaTemplate {
- pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+impl<S: Span> MetaTemplate<S> {
+ pub(crate) fn parse_pattern(pattern: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(pattern, Mode::Pattern)
}
- pub(crate) fn parse_template(template: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ pub(crate) fn parse_template(template: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(template, Mode::Template)
}
- pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &Op<S>> {
self.0.iter()
}
- fn parse(tt: &tt::Subtree, mode: Mode) -> Result<MetaTemplate, ParseError> {
+ fn parse(tt: &tt::Subtree<S>, mode: Mode) -> Result<Self, ParseError> {
let mut src = TtIter::new(tt);
let mut res = Vec::new();
@@ -49,16 +50,16 @@ impl MetaTemplate {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) enum Op {
- Var { name: SmolStr, kind: Option<MetaVarKind>, id: tt::TokenId },
- Ignore { name: SmolStr, id: tt::TokenId },
+pub(crate) enum Op<S> {
+ Var { name: SmolStr, kind: Option<MetaVarKind>, id: S },
+ Ignore { name: SmolStr, id: S },
Index { depth: usize },
Count { name: SmolStr, depth: Option<usize> },
- Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
- Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter },
- Literal(tt::Literal),
- Punct(SmallVec<[tt::Punct; 3]>),
- Ident(tt::Ident),
+ Repeat { tokens: MetaTemplate<S>, kind: RepeatKind, separator: Option<Separator<S>> },
+ Subtree { tokens: MetaTemplate<S>, delimiter: tt::Delimiter<S> },
+ Literal(tt::Literal<S>),
+ Punct(SmallVec<[tt::Punct<S>; 3]>),
+ Ident(tt::Ident<S>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -87,15 +88,15 @@ pub(crate) enum MetaVarKind {
}
#[derive(Clone, Debug, Eq)]
-pub(crate) enum Separator {
- Literal(tt::Literal),
- Ident(tt::Ident),
- Puncts(SmallVec<[tt::Punct; 3]>),
+pub(crate) enum Separator<S> {
+ Literal(tt::Literal<S>),
+ Ident(tt::Ident<S>),
+ Puncts(SmallVec<[tt::Punct<S>; 3]>),
}
// Note that when we compare a Separator, we just care about its textual value.
-impl PartialEq for Separator {
- fn eq(&self, other: &Separator) -> bool {
+impl<S> PartialEq for Separator<S> {
+ fn eq(&self, other: &Separator<S>) -> bool {
use Separator::*;
match (self, other) {
@@ -117,11 +118,11 @@ enum Mode {
Template,
}
-fn next_op(
- first_peeked: &tt::TokenTree,
- src: &mut TtIter<'_>,
+fn next_op<S: Span>(
+ first_peeked: &tt::TokenTree<S>,
+ src: &mut TtIter<'_, S>,
mode: Mode,
-) -> Result<Op, ParseError> {
+) -> Result<Op<S>, ParseError> {
let res = match first_peeked {
tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
src.next().expect("first token already peeked");
@@ -212,7 +213,10 @@ fn next_op(
Ok(res)
}
-fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<MetaVarKind>, ParseError> {
+fn eat_fragment_kind<S: Span>(
+ src: &mut TtIter<'_, S>,
+ mode: Mode,
+) -> Result<Option<MetaVarKind>, ParseError> {
if let Mode::Pattern = mode {
src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
let ident = src
@@ -240,11 +244,13 @@ fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<MetaVarK
Ok(None)
}
-fn is_boolean_literal(lit: &tt::Literal) -> bool {
+fn is_boolean_literal<S>(lit: &tt::Literal<S>) -> bool {
matches!(lit.text.as_str(), "true" | "false")
}
-fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
+fn parse_repeat<S: Span>(
+ src: &mut TtIter<'_, S>,
+) -> Result<(Option<Separator<S>>, RepeatKind), ParseError> {
let mut separator = Separator::Puncts(SmallVec::new());
for tt in src {
let tt = match tt {
@@ -281,7 +287,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
Err(ParseError::InvalidRepeat)
}
-fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
+fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> {
let func = src.expect_ident()?;
let args = src.expect_subtree()?;
@@ -314,7 +320,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
Ok(op)
}
-fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
+fn parse_depth<S: Span>(src: &mut TtIter<'_, S>) -> Result<usize, ()> {
if src.len() == 0 {
Ok(0)
} else if let tt::Leaf::Literal(lit) = src.expect_literal()? {
@@ -325,7 +331,7 @@ fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
}
}
-fn try_eat_comma(src: &mut TtIter<'_>) -> bool {
+fn try_eat_comma<S: Span>(src: &mut TtIter<'_, S>) -> bool {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) {
let _ = src.next();
return true;
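
For reference, the surface syntax `parse_repeat` and `parse_metavar_expr` accept, written as an ordinary `macro_rules!`. Metavariable expressions are rustc's unstable `macro_metavar_expr` feature; the `${count(x)}`/`${index()}` spelling matches this parser, which expects a plain identifier inside `count`:

    #![feature(macro_metavar_expr)]

    macro_rules! enumerate {
        // `$(...),+` is handled by `parse_repeat` (separator `,`,
        // `RepeatKind::OneOrMore`); `${index()}` parses to
        // `Op::Index { depth: 0 }` and `${count(x)}` to `Op::Count`.
        ($($x:ident),+) => {
            (${count(x)}, [$((${index()}, stringify!($x))),+])
        };
    }

    fn main() {
        let (n, items) = enumerate!(a, b);
        assert_eq!(n, 2);
        assert_eq!(items, [(0, "a"), (1, "b")]);
    }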
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 7b9bb61e6..b89bfd74a 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -1,98 +1,102 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
-use rustc_hash::FxHashMap;
-use stdx::{always, non_empty_vec::NonEmptyVec};
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::{never, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
-
-use crate::{
- to_parser_input::to_parser_input,
- tt::{
- self,
- buffer::{Cursor, TokenBuffer},
- },
- tt_iter::TtIter,
- TokenMap,
+use tt::{
+ buffer::{Cursor, TokenBuffer},
+ Span, SpanData, SyntaxContext,
};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap};
+
#[cfg(test)]
mod tests;
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
- let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
- node,
- Default::default(),
- 0,
- Default::default(),
- Default::default(),
- );
- (subtree, token_map)
+pub trait SpanMapper<S: Span> {
+ fn span_for(&self, range: TextRange) -> S;
}
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_tree_with_modifications(
- node: &SyntaxNode,
- existing_token_map: TokenMap,
- next_id: u32,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (tt::Subtree, TokenMap, u32) {
- let global_offset = node.text_range().start();
- let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
- let subtree = convert_tokens(&mut c);
- c.id_alloc.map.shrink_to_fit();
- always!(c.replace.is_empty(), "replace: {:?}", c.replace);
- always!(c.append.is_empty(), "append: {:?}", c.append);
- (subtree, c.id_alloc.map, c.id_alloc.next_id)
+impl<S: Span> SpanMapper<S> for SpanMap<S> {
+ fn span_for(&self, range: TextRange) -> S {
+ self.span_at(range.start())
+ }
}
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap {
- syntax_node_to_token_map_with_modifications(
- node,
- Default::default(),
- 0,
- Default::default(),
- Default::default(),
- )
- .0
+impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
+ fn span_for(&self, range: TextRange) -> S {
+ SM::span_for(self, range)
+ }
}
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_map_with_modifications(
- node: &SyntaxNode,
- existing_token_map: TokenMap,
- next_id: u32,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (TokenMap, u32) {
- let global_offset = node.text_range().start();
- let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
- collect_tokens(&mut c);
- c.id_alloc.map.shrink_to_fit();
- always!(c.replace.is_empty(), "replace: {:?}", c.replace);
- always!(c.append.is_empty(), "append: {:?}", c.append);
- (c.id_alloc.map, c.id_alloc.next_id)
+/// Dummy things for testing where spans don't matter.
+pub(crate) mod dummy_test_span_utils {
+ use super::*;
+
+ pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+ pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY;
+
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub struct DummyTestSpanAnchor;
+ impl tt::SpanAnchor for DummyTestSpanAnchor {
+ const DUMMY: Self = DummyTestSpanAnchor;
+ }
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+ pub struct DummyTestSyntaxContext;
+ impl SyntaxContext for DummyTestSyntaxContext {
+ const DUMMY: Self = DummyTestSyntaxContext;
+ }
+
+ pub struct DummyTestSpanMap;
+
+ impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+ fn span_for(
+ &self,
+ range: syntax::TextRange,
+ ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+ tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+ }
+ }
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct SyntheticTokenId(pub u32);
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans.
+pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
+ node: &SyntaxNode,
+ map: SpanMap,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+ SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+{
+ let mut c = Converter::new(node, map, Default::default(), Default::default());
+ convert_tokens(&mut c)
+}
-#[derive(Debug, Clone)]
-pub struct SyntheticToken {
- pub kind: SyntaxKind,
- pub text: SmolStr,
- pub range: TextRange,
- pub id: SyntheticTokenId,
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans. Additionally, the `append` and `remove` parameters allow extra tokens to be
+/// injected into, or syntax nodes to be hidden from, the output.
+pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
+ node: &SyntaxNode,
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+ remove: FxHashSet<SyntaxNode>,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+ SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
+ let mut c = Converter::new(node, map, append, remove);
+ convert_tokens(&mut c)
}
// The following items are what a `rustc` macro can be parsed into:
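
As a usage sketch of the new entry point (mirroring the benchmark code; the fixture string is an assumption):

    use syntax::{ast, AstNode};
    let source_file = ast::SourceFile::parse("fn f() {}").ok().unwrap();
    let tt = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
    // every leaf of `tt` now carries a full SpanData rather than a tt::TokenId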
@@ -107,10 +111,17 @@ pub struct SyntheticToken {
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>)
-pub fn token_tree_to_syntax_node(
- tt: &tt::Subtree,
+/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
+/// The produced `SpanMap` contains a mapping from the syntax node offsets to the subtree's spans.
+pub fn token_tree_to_syntax_node<Anchor, Ctx>(
+ tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap) {
+) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>)
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
let buffer = match tt {
tt::Subtree {
delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. },
@@ -137,29 +148,41 @@ pub fn token_tree_to_syntax_node(
tree_sink.finish()
}
-/// Convert a string to a `TokenTree`
-pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
+/// anchor with the given context.
+pub fn parse_to_token_tree<Anchor, Ctx>(
+ anchor: Anchor,
+ ctx: Ctx,
+ text: &str,
+) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
let lexed = parser::LexedStr::new(text);
if lexed.errors().next().is_some() {
return None;
}
+ let mut conv = RawConverter { lexed, pos: 0, anchor, ctx };
+ Some(convert_tokens(&mut conv))
+}
- let mut conv = RawConverter {
- lexed,
- pos: 0,
- id_alloc: TokenIdAlloc {
- map: Default::default(),
- global_offset: TextSize::default(),
- next_id: 0,
- },
- };
-
- let subtree = convert_tokens(&mut conv);
- Some((subtree, conv.id_alloc.map))
+/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
+pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+where
+ S: Span,
+{
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+ let mut conv = StaticRawConverter { lexed, pos: 0, span };
+ Some(convert_tokens(&mut conv))
}
/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
if tt.token_trees.is_empty() {
return Vec::new();
}
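
For orientation, a sketch of how the two new string entry points are meant to be called (`anchor`, `ctx`, and the input text are placeholder assumptions; `DUMMY` is the test constant from the hunk above):

    // spans carry real per-token ranges, anchored to `anchor`/`ctx`:
    let tt = parse_to_token_tree(anchor, ctx, "struct S;");
    // every span is the same fixed value, for when ranges are meaningless:
    let tt = parse_to_token_tree_static_span(DUMMY, "struct S;");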
@@ -172,10 +195,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res.push(match expanded.value {
None => break,
- Some(tt @ tt::TokenTree::Leaf(_)) => {
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] }
- }
- Some(tt::TokenTree::Subtree(tt)) => tt,
+ Some(tt) => tt.subtree_or_wrap(),
});
let mut fork = iter.clone();
@@ -187,7 +207,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
if iter.peek_n(0).is_some() {
res.push(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: iter.cloned().collect(),
});
}
@@ -195,136 +215,118 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res
}
-fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
- struct StackEntry {
- subtree: tt::Subtree,
- idx: usize,
- open_range: TextRange,
- }
-
- let entry = StackEntry {
- subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
- // never used (delimiter is `None`)
- idx: !0,
- open_range: TextRange::empty(TextSize::of('.')),
- };
+fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
+where
+ C: TokenConverter<S>,
+ S: Span,
+{
+ let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
let mut stack = NonEmptyVec::new(entry);
- loop {
- let StackEntry { subtree, .. } = stack.last_mut();
- let result = &mut subtree.token_trees;
- let (token, range) = match conv.bump() {
- Some(it) => it,
- None => break,
- };
- let synth_id = token.synthetic_id(conv);
-
- let kind = token.kind(conv);
- if kind == COMMENT {
- // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
- // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id();
- if let Some(tokens) = conv.convert_doc_comment(&token, next_id) {
- let id = conv.id_alloc().alloc(range, synth_id);
- debug_assert_eq!(id, next_id);
- result.extend(tokens);
- }
- continue;
- }
- let tt = if kind.is_punct() && kind != UNDERSCORE {
- if synth_id.is_none() {
- assert_eq!(range.len(), TextSize::of('.'));
- }
-
- let expected = match subtree.delimiter.kind {
- tt::DelimiterKind::Parenthesis => Some(T![')']),
- tt::DelimiterKind::Brace => Some(T!['}']),
- tt::DelimiterKind::Bracket => Some(T![']']),
- tt::DelimiterKind::Invisible => None,
- };
-
- if let Some(expected) = expected {
- if kind == expected {
- if let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, Some(range));
- stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+ while let Some((token, abs_range)) = conv.bump() {
+ let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
+
+ let tt = match token.as_leaf() {
+ Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+ None => match token.kind(conv) {
+ // Desugar doc comments into doc attributes
+ COMMENT => {
+ let span = conv.span_for(abs_range);
+ if let Some(tokens) = conv.convert_doc_comment(&token, span) {
+ result.extend(tokens);
}
continue;
}
- }
-
- let delim = match kind {
- T!['('] => Some(tt::DelimiterKind::Parenthesis),
- T!['{'] => Some(tt::DelimiterKind::Brace),
- T!['['] => Some(tt::DelimiterKind::Bracket),
- _ => None,
- };
+ kind if kind.is_punct() && kind != UNDERSCORE => {
+ let expected = match delimiter.kind {
+ tt::DelimiterKind::Parenthesis => Some(T![')']),
+ tt::DelimiterKind::Brace => Some(T!['}']),
+ tt::DelimiterKind::Bracket => Some(T![']']),
+ tt::DelimiterKind::Invisible => None,
+ };
+
+ // Current token is a closing delimiter that we expect; fix up the closing span
+ // and end the subtree here
+ if matches!(expected, Some(expected) if expected == kind) {
+ if let Some(mut subtree) = stack.pop() {
+ subtree.delimiter.close = conv.span_for(abs_range);
+ stack.last_mut().token_trees.push(subtree.into());
+ }
+ continue;
+ }
- if let Some(kind) = delim {
- let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
- let subtree = tt::Subtree {
- delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind },
- token_trees: vec![],
- };
- stack.push(StackEntry { subtree, idx, open_range: range });
- continue;
- }
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ // Start a new subtree
+ if let Some(kind) = delim {
+ let open = conv.span_for(abs_range);
+ stack.push(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open,
+ // will be overwritten on subtree close above
+ close: open,
+ kind,
+ },
+ token_trees: vec![],
+ });
+ continue;
+ }
- let spacing = match conv.peek().map(|next| next.kind(conv)) {
- Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
- _ => tt::Spacing::Alone,
- };
- let char = match token.to_char(conv) {
- Some(c) => c,
- None => {
- panic!("Token from lexer must be single char: token = {token:#?}");
+ let spacing = match conv.peek().map(|next| next.kind(conv)) {
+ Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
+ _ => tt::Spacing::Alone,
+ };
+ let Some(char) = token.to_char(conv) else {
+ panic!("Token from lexer must be single char: token = {token:#?}")
+ };
+ tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
+ .into()
}
- };
- tt::Leaf::from(tt::Punct {
- char,
- spacing,
- span: conv.id_alloc().alloc(range, synth_id),
- })
- .into()
- } else {
- macro_rules! make_leaf {
- ($i:ident) => {
- tt::$i {
- span: conv.id_alloc().alloc(range, synth_id),
- text: token.to_text(conv),
+ kind => {
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }
+ .into()
+ };
}
- .into()
- };
- }
- let leaf: tt::Leaf = match kind {
- T![true] | T![false] => make_leaf!(Ident),
- IDENT => make_leaf!(Ident),
- UNDERSCORE => make_leaf!(Ident),
- k if k.is_keyword() => make_leaf!(Ident),
- k if k.is_literal() => make_leaf!(Literal),
- LIFETIME_IDENT => {
- let char_unit = TextSize::of('\'');
- let r = TextRange::at(range.start(), char_unit);
- let apostrophe = tt::Leaf::from(tt::Punct {
- char: '\'',
- spacing: tt::Spacing::Joint,
- span: conv.id_alloc().alloc(r, synth_id),
- });
- result.push(apostrophe.into());
-
- let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
- let ident = tt::Leaf::from(tt::Ident {
- text: SmolStr::new(&token.to_text(conv)[1..]),
- span: conv.id_alloc().alloc(r, synth_id),
- });
- result.push(ident.into());
- continue;
- }
- _ => continue,
- };
+ let leaf: tt::Leaf<_> = match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let apostrophe = tt::Leaf::from(tt::Punct {
+ char: '\'',
+ spacing: tt::Spacing::Joint,
+ span: conv
+ .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
+ });
+ result.push(apostrophe.into());
+
+ let ident = tt::Leaf::from(tt::Ident {
+ text: SmolStr::new(&token.to_text(conv)[1..]),
+ span: conv.span_for(TextRange::new(
+ abs_range.start() + TextSize::of('\''),
+ abs_range.end(),
+ )),
+ });
+ result.push(ident.into());
+ continue;
+ }
+ _ => continue,
+ };
- leaf.into()
+ leaf.into()
+ }
+ },
};
+
result.push(tt);
}
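
The rewritten loop keeps the old stack discipline, just without token-id bookkeeping; a toy trace (input assumed for illustration):

    // input `a ( b )`:
    //   bump `a` -> push Ident leaf into the root subtree
    //   bump `(` -> push Subtree { open: span(`(`), close: span(`(`) } on the stack
    //   bump `b` -> push Ident leaf into that subtree
    //   bump `)` -> pop the subtree, overwrite `close` with span(`)`),
    //               and append it to the parent's token_trees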
@@ -334,10 +336,9 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
while let Some(entry) = stack.pop() {
let parent = stack.last_mut();
- conv.id_alloc().close_delim(entry.idx, None);
- let leaf: tt::Leaf = tt::Punct {
- span: conv.id_alloc().alloc(entry.open_range, None),
- char: match entry.subtree.delimiter.kind {
+ let leaf: tt::Leaf<_> = tt::Punct {
+ span: entry.delimiter.open,
+ char: match entry.delimiter.kind {
tt::DelimiterKind::Parenthesis => '(',
tt::DelimiterKind::Brace => '{',
tt::DelimiterKind::Bracket => '[',
@@ -346,11 +347,11 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
spacing: tt::Spacing::Alone,
}
.into();
- parent.subtree.token_trees.push(leaf.into());
- parent.subtree.token_trees.extend(entry.subtree.token_trees);
+ parent.token_trees.push(leaf.into());
+ parent.token_trees.extend(entry.token_trees);
}
- let subtree = stack.into_last().subtree;
+ let subtree = stack.into_last();
if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
first.clone()
} else {
@@ -358,111 +359,6 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
}
}
-fn collect_tokens<C: TokenConverter>(conv: &mut C) {
- struct StackEntry {
- idx: usize,
- open_range: TextRange,
- delimiter: tt::DelimiterKind,
- }
-
- let entry = StackEntry {
- delimiter: tt::DelimiterKind::Invisible,
- // never used (delimiter is `None`)
- idx: !0,
- open_range: TextRange::empty(TextSize::of('.')),
- };
- let mut stack = NonEmptyVec::new(entry);
-
- loop {
- let StackEntry { delimiter, .. } = stack.last_mut();
- let (token, range) = match conv.bump() {
- Some(it) => it,
- None => break,
- };
- let synth_id = token.synthetic_id(conv);
-
- let kind = token.kind(conv);
- if kind == COMMENT {
- // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
- // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id();
- if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) {
- let id = conv.id_alloc().alloc(range, synth_id);
- debug_assert_eq!(id, next_id);
- }
- continue;
- }
- if kind.is_punct() && kind != UNDERSCORE {
- if synth_id.is_none() {
- assert_eq!(range.len(), TextSize::of('.'));
- }
-
- let expected = match delimiter {
- tt::DelimiterKind::Parenthesis => Some(T![')']),
- tt::DelimiterKind::Brace => Some(T!['}']),
- tt::DelimiterKind::Bracket => Some(T![']']),
- tt::DelimiterKind::Invisible => None,
- };
-
- if let Some(expected) = expected {
- if kind == expected {
- if let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, Some(range));
- }
- continue;
- }
- }
-
- let delim = match kind {
- T!['('] => Some(tt::DelimiterKind::Parenthesis),
- T!['{'] => Some(tt::DelimiterKind::Brace),
- T!['['] => Some(tt::DelimiterKind::Bracket),
- _ => None,
- };
-
- if let Some(kind) = delim {
- let (_id, idx) = conv.id_alloc().open_delim(range, synth_id);
-
- stack.push(StackEntry { idx, open_range: range, delimiter: kind });
- continue;
- }
-
- conv.id_alloc().alloc(range, synth_id);
- } else {
- macro_rules! make_leaf {
- ($i:ident) => {{
- conv.id_alloc().alloc(range, synth_id);
- }};
- }
- match kind {
- T![true] | T![false] => make_leaf!(Ident),
- IDENT => make_leaf!(Ident),
- UNDERSCORE => make_leaf!(Ident),
- k if k.is_keyword() => make_leaf!(Ident),
- k if k.is_literal() => make_leaf!(Literal),
- LIFETIME_IDENT => {
- let char_unit = TextSize::of('\'');
- let r = TextRange::at(range.start(), char_unit);
- conv.id_alloc().alloc(r, synth_id);
-
- let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
- conv.id_alloc().alloc(r, synth_id);
- continue;
- }
- _ => continue,
- };
- };
-
- // If we get here, we've consumed all input tokens.
- // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
- // Merge them so we're left with one.
- while let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, None);
- conv.id_alloc().alloc(entry.open_range, None);
- }
- }
-}
-
fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!(
kind,
@@ -511,162 +407,126 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
text.into()
}
-fn convert_doc_comment(
+fn convert_doc_comment<S: Copy>(
token: &syntax::SyntaxToken,
- span: tt::TokenId,
-) -> Option<Vec<tt::TokenTree>> {
+ span: S,
+) -> Option<Vec<tt::TokenTree<S>>> {
cov_mark::hit!(test_meta_doc_comments);
let comment = ast::Comment::cast(token.clone())?;
let doc = comment.kind().doc?;
- // Make `doc="\" Comments\""
- let meta_tkns =
- vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)];
+ let mk_ident =
+ |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }));
- // Make `#![]`
- let mut token_trees = Vec::with_capacity(3);
- token_trees.push(mk_punct('#', span));
- if let ast::CommentPlacement::Inner = doc {
- token_trees.push(mk_punct('!', span));
- }
- token_trees.push(tt::TokenTree::from(tt::Subtree {
- delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
- token_trees: meta_tkns,
- }));
-
- return Some(token_trees);
-
- // Helper functions
- fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree {
- tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }))
- }
-
- fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree {
+ let mk_punct = |c: char| {
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
char: c,
spacing: tt::Spacing::Alone,
span,
}))
- }
+ };
- fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree {
+ let mk_doc_literal = |comment: &ast::Comment| {
let lit = tt::Literal { text: doc_comment_text(comment), span };
tt::TokenTree::from(tt::Leaf::from(lit))
- }
-}
-
-struct TokenIdAlloc {
- map: TokenMap,
- global_offset: TextSize,
- next_id: u32,
-}
-
-impl TokenIdAlloc {
- fn alloc(
- &mut self,
- absolute_range: TextRange,
- synthetic_id: Option<SyntheticTokenId>,
- ) -> tt::TokenId {
- let relative_range = absolute_range - self.global_offset;
- let token_id = tt::TokenId(self.next_id);
- self.next_id += 1;
- self.map.insert(token_id, relative_range);
- if let Some(id) = synthetic_id {
- self.map.insert_synthetic(token_id, id);
- }
- token_id
- }
+ };
- fn open_delim(
- &mut self,
- open_abs_range: TextRange,
- synthetic_id: Option<SyntheticTokenId>,
- ) -> (tt::TokenId, usize) {
- let token_id = tt::TokenId(self.next_id);
- self.next_id += 1;
- let idx = self.map.insert_delim(
- token_id,
- open_abs_range - self.global_offset,
- open_abs_range - self.global_offset,
- );
- if let Some(id) = synthetic_id {
- self.map.insert_synthetic(token_id, id);
- }
- (token_id, idx)
- }
+ // Make `doc = " Comments"`
+ let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
- fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
- match close_abs_range {
- None => {
- self.map.remove_delim(idx);
- }
- Some(close) => {
- self.map.update_close_delim(idx, close - self.global_offset);
- }
- }
+ // Make `#![]`
+ let mut token_trees = Vec::with_capacity(3);
+ token_trees.push(mk_punct('#'));
+ if let ast::CommentPlacement::Inner = doc {
+ token_trees.push(mk_punct('!'));
}
+ token_trees.push(tt::TokenTree::from(tt::Subtree {
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+ token_trees: meta_tkns,
+ }));
- fn peek_next_id(&self) -> tt::TokenId {
- tt::TokenId(self.next_id)
- }
+ Some(token_trees)
}
/// A raw token (straight from lexer) converter
-struct RawConverter<'a> {
+struct RawConverter<'a, Anchor, Ctx> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ anchor: Anchor,
+ ctx: Ctx,
+}
+/// A raw token (straight from lexer) converter that gives every token the same span.
+struct StaticRawConverter<'a, S> {
lexed: parser::LexedStr<'a>,
pos: usize,
- id_alloc: TokenIdAlloc,
+ span: S,
}
-trait SrcToken<Ctx>: std::fmt::Debug {
+trait SrcToken<Ctx, S>: std::fmt::Debug {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
fn to_char(&self, ctx: &Ctx) -> Option<char>;
fn to_text(&self, ctx: &Ctx) -> SmolStr;
- fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
+ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+ None
+ }
}
-trait TokenConverter: Sized {
- type Token: SrcToken<Self>;
+trait TokenConverter<S>: Sized {
+ type Token: SrcToken<Self, S>;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: tt::TokenId,
- ) -> Option<Vec<tt::TokenTree>>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
- fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+ fn span_for(&self, range: TextRange) -> S;
}
-impl SrcToken<RawConverter<'_>> for usize {
- fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
+impl<Anchor, S, Ctx> SrcToken<RawConverter<'_, Anchor, Ctx>, S> for usize {
+ fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
- fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
+ fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr {
ctx.lexed.text(*self).into()
}
+}
- fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option<SyntheticTokenId> {
- None
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
+ fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
+ fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
+ fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
+ ctx.lexed.text(*self).into()
}
}
-impl TokenConverter for RawConverter<'_> {
+impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>>
+ for RawConverter<'_, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
type Token = usize;
- fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
+ fn convert_doc_comment(
+ &self,
+ &token: &usize,
+ span: SpanData<Anchor, Ctx>,
+ ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span)
}
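
Concretely, the desugaring implemented above (shape read off the code; literal escaping simplified):

    // `/// outer comment`  =>  `#[doc = " outer comment"]`
    // `//! inner comment`  =>  `#![doc = " inner comment"]`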
@@ -678,7 +538,7 @@ impl TokenConverter for RawConverter<'_> {
let token = self.pos;
self.pos += 1;
let range = self.lexed.text_range(token);
- let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
Some((token, range))
}
@@ -690,137 +550,165 @@ impl TokenConverter for RawConverter<'_> {
Some(self.pos)
}
- fn id_alloc(&mut self) -> &mut TokenIdAlloc {
- &mut self.id_alloc
+ fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> {
+ SpanData { range, anchor: self.anchor, ctx: self.ctx }
}
}
-struct Converter {
- id_alloc: TokenIdAlloc,
+impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
+where
+ S: Span,
+{
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text), span)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn span_for(&self, _: TextRange) -> S {
+ self.span
+ }
+}
+
+struct Converter<SpanMap, S> {
current: Option<SyntaxToken>,
- current_synthetic: Vec<SyntheticToken>,
+ current_leafs: Vec<tt::Leaf<S>>,
preorder: PreorderWithTokens,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
+ /// Used to make the emitted text ranges in the spans relative to the span anchor.
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ remove: FxHashSet<SyntaxNode>,
}
-impl Converter {
+impl<SpanMap, S> Converter<SpanMap, S> {
fn new(
node: &SyntaxNode,
- global_offset: TextSize,
- existing_token_map: TokenMap,
- next_id: u32,
- mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- ) -> Converter {
- let range = node.text_range();
- let mut preorder = node.preorder_with_tokens();
- let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
- Converter {
- id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
- current: first,
- current_synthetic: synthetic,
- preorder,
- range,
- replace,
- append,
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ remove: FxHashSet<SyntaxNode>,
+ ) -> Self {
+ let mut this = Converter {
+ current: None,
+ preorder: node.preorder_with_tokens(),
+ range: node.text_range(),
punct_offset: None,
- }
- }
-
- fn next_token(
- preorder: &mut PreorderWithTokens,
- replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
- while let Some(ev) = preorder.next() {
- let ele = match ev {
- WalkEvent::Enter(ele) => ele,
- WalkEvent::Leave(ele) => {
- if let Some(mut v) = append.remove(&ele) {
- if !v.is_empty() {
- v.reverse();
- return (None, v);
- }
+ map,
+ append,
+ remove,
+ current_leafs: vec![],
+ };
+ let first = this.next_token();
+ this.current = first;
+ this
+ }
+
+ fn next_token(&mut self) -> Option<SyntaxToken> {
+ while let Some(ev) = self.preorder.next() {
+ match ev {
+ WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
+ WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+ self.preorder.skip_subtree();
+ if let Some(mut v) = self.append.remove(&n.into()) {
+ v.reverse();
+ self.current_leafs.extend(v);
+ return None;
}
- continue;
}
- };
- if let Some(mut v) = replace.remove(&ele) {
- preorder.skip_subtree();
- if !v.is_empty() {
- v.reverse();
- return (None, v);
+ WalkEvent::Enter(SyntaxElement::Node(_)) => (),
+ WalkEvent::Leave(ele) => {
+ if let Some(mut v) = self.append.remove(&ele) {
+ v.reverse();
+ self.current_leafs.extend(v);
+ return None;
+ }
}
}
- match ele {
- SyntaxElement::Token(t) => return (Some(t), Vec::new()),
- _ => {}
- }
}
- (None, Vec::new())
+ None
}
}
#[derive(Debug)]
-enum SynToken {
+enum SynToken<S> {
Ordinary(SyntaxToken),
- // FIXME is this supposed to be `Punct`?
- Punch(SyntaxToken, TextSize),
- Synthetic(SyntheticToken),
+ Punct { token: SyntaxToken, offset: usize },
+ Leaf(tt::Leaf<S>),
}
-impl SynToken {
- fn token(&self) -> Option<&SyntaxToken> {
+impl<S> SynToken<S> {
+ fn token(&self) -> &SyntaxToken {
match self {
- SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
- SynToken::Synthetic(_) => None,
+ SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
+ SynToken::Leaf(_) => unreachable!(),
}
}
}
-impl SrcToken<Converter> for SynToken {
- fn kind(&self, ctx: &Converter) -> SyntaxKind {
+impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
+ fn kind(&self, ctx: &Converter<SpanMap, S>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
- SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
- SynToken::Synthetic(token) => token.kind,
+ SynToken::Punct { .. } => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+ SynToken::Leaf(_) => {
+ never!();
+ SyntaxKind::ERROR
+ }
}
}
- fn to_char(&self, _ctx: &Converter) -> Option<char> {
+ fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
- SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
- SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
- SynToken::Synthetic(_) => None,
+ SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
+ SynToken::Leaf(_) => None,
}
}
- fn to_text(&self, _ctx: &Converter) -> SmolStr {
+ fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
match self {
- SynToken::Ordinary(token) => token.text().into(),
- SynToken::Punch(token, _) => token.text().into(),
- SynToken::Synthetic(token) => token.text.clone(),
+ SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
+ SynToken::Leaf(_) => {
+ never!();
+ "".into()
+ }
}
}
-
- fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
+ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
match self {
- SynToken::Synthetic(token) => Some(token.id),
- _ => None,
+ SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
+ SynToken::Leaf(it) => Some(it),
}
}
}
-impl TokenConverter for Converter {
- type Token = SynToken;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: tt::TokenId,
- ) -> Option<Vec<tt::TokenTree>> {
- convert_doc_comment(token.token()?, span)
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
+where
+ S: Span,
+ SpanMap: SpanMapper<S>,
+{
+ type Token = SynToken<S>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ convert_doc_comment(token.token(), span)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -830,34 +718,31 @@ impl TokenConverter for Converter {
let range = punct.text_range();
self.punct_offset = Some((punct.clone(), offset));
let range = TextRange::at(range.start() + offset, TextSize::of('.'));
- return Some((SynToken::Punch(punct, offset), range));
+ return Some((
+ SynToken::Punct { token: punct, offset: u32::from(offset) as usize },
+ range,
+ ));
}
}
- if let Some(synth_token) = self.current_synthetic.pop() {
- if self.current_synthetic.is_empty() {
- let (new_current, new_synth) =
- Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
- self.current = new_current;
- self.current_synthetic = new_synth;
+ if let Some(leaf) = self.current_leafs.pop() {
+ if self.current_leafs.is_empty() {
+ self.current = self.next_token();
}
- let range = synth_token.range;
- return Some((SynToken::Synthetic(synth_token), range));
+ return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
}
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
}
- let (new_current, new_synth) =
- Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
- self.current = new_current;
- self.current_synthetic = new_synth;
+
+ self.current = self.next_token();
let token = if curr.kind().is_punct() {
self.punct_offset = Some((curr.clone(), 0.into()));
let range = curr.text_range();
let range = TextRange::at(range.start(), TextSize::of('.'));
- (SynToken::Punch(curr, 0.into()), range)
+ (SynToken::Punct { token: curr, offset: 0 as usize }, range)
} else {
self.punct_offset = None;
let range = curr.text_range();
@@ -871,55 +756,55 @@ impl TokenConverter for Converter {
if let Some((punct, mut offset)) = self.punct_offset.clone() {
offset += TextSize::of('.');
if usize::from(offset) < punct.text().len() {
- return Some(SynToken::Punch(punct, offset));
+ return Some(SynToken::Punct { token: punct, offset: usize::from(offset) });
}
}
- if let Some(synth_token) = self.current_synthetic.last() {
- return Some(SynToken::Synthetic(synth_token.clone()));
- }
-
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
}
let token = if curr.kind().is_punct() {
- SynToken::Punch(curr, 0.into())
+ SynToken::Punct { token: curr, offset: 0 as usize }
} else {
SynToken::Ordinary(curr)
};
Some(token)
}
- fn id_alloc(&mut self) -> &mut TokenIdAlloc {
- &mut self.id_alloc
+ fn span_for(&self, range: TextRange) -> S {
+ self.map.span_for(range)
}
}
-struct TtTreeSink<'a> {
+struct TtTreeSink<'a, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
buf: String,
- cursor: Cursor<'a>,
- open_delims: FxHashMap<tt::TokenId, TextSize>,
+ cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
- token_map: TokenMap,
+ token_map: SpanMap<SpanData<Anchor, Ctx>>,
}
-impl<'a> TtTreeSink<'a> {
- fn new(cursor: Cursor<'a>) -> Self {
+impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
+ fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
- open_delims: FxHashMap::default(),
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
- token_map: TokenMap::default(),
+ token_map: SpanMap::empty(),
}
}
- fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
- self.token_map.shrink_to_fit();
+ fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) {
+ self.token_map.finish();
(self.inner.finish(), self.token_map)
}
}
@@ -936,27 +821,34 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl TtTreeSink<'_> {
+impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
/// Parses a float literal as if it were one to two name ref nodes with a dot in between.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
- let (text, _span) = match self.cursor.token_tree() {
+ let (text, span) = match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
(lit.text.as_str(), lit.span)
}
_ => unreachable!(),
};
+ // FIXME: Span splitting
match text.split_once('.') {
Some((left, right)) => {
assert!(!left.is_empty());
+
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, left);
self.inner.finish_node();
+ self.token_map.push(self.text_pos + TextSize::of(left), span);
// here we move the exit up; the original exit has been deleted in the process
self.inner.finish_node();
self.inner.token(SyntaxKind::DOT, ".");
+ self.token_map.push(self.text_pos + TextSize::of(left) + TextSize::of("."), span);
if has_pseudo_dot {
assert!(right.is_empty(), "{left}.{right}");
@@ -964,11 +856,13 @@ impl TtTreeSink<'_> {
assert!(!right.is_empty(), "{left}.{right}");
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, right);
+ self.token_map.push(self.text_pos + TextSize::of(text), span);
self.inner.finish_node();
// the parser creates an unbalanced start node; we are required to close it here
self.inner.finish_node();
}
+ self.text_pos += TextSize::of(text);
}
None => unreachable!(),
}
@@ -987,11 +881,11 @@ impl TtTreeSink<'_> {
break;
}
last = self.cursor;
- let text: &str = loop {
+ let (text, span) = loop {
break match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
// Mark the range if needed
- let (text, id) = match leaf {
+ let (text, span) = match leaf {
tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span),
tt::Leaf::Punct(punct) => {
assert!(punct.char.is_ascii());
@@ -1003,18 +897,13 @@ impl TtTreeSink<'_> {
}
tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span),
};
- let range = TextRange::at(self.text_pos, TextSize::of(text));
- self.token_map.insert(id, range);
self.cursor = self.cursor.bump();
- text
+ (text, span)
}
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
self.cursor = self.cursor.subtree().unwrap();
match delim_to_str(subtree.delimiter.kind, false) {
- Some(it) => {
- self.open_delims.insert(subtree.delimiter.open, self.text_pos);
- it
- }
+ Some(it) => (it, subtree.delimiter.open),
None => continue,
}
}
@@ -1022,21 +911,7 @@ impl TtTreeSink<'_> {
let parent = self.cursor.end().unwrap();
self.cursor = self.cursor.bump();
match delim_to_str(parent.delimiter.kind, true) {
- Some(it) => {
- if let Some(open_delim) =
- self.open_delims.get(&parent.delimiter.open)
- {
- let open_range = TextRange::at(*open_delim, TextSize::of('('));
- let close_range =
- TextRange::at(self.text_pos, TextSize::of('('));
- self.token_map.insert_delim(
- parent.delimiter.open,
- open_range,
- close_range,
- );
- }
- it
- }
+ Some(it) => (it, parent.delimiter.close),
None => continue,
}
}
@@ -1044,10 +919,12 @@ impl TtTreeSink<'_> {
};
self.buf += text;
self.text_pos += TextSize::of(text);
+ self.token_map.push(self.text_pos, span);
}
self.inner.token(kind, self.buf.as_str());
self.buf.clear();
+ // FIXME: Emitting whitespace for this is really just a hack; we should get rid of it.
// Add whitespace between adjacent puncts
let next = last.bump();
if let (
@@ -1063,6 +940,7 @@ impl TtTreeSink<'_> {
if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' {
self.inner.token(WHITESPACE, " ");
self.text_pos += TextSize::of(' ');
+ self.token_map.push(self.text_pos, curr.span);
}
}
}
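
As a worked example of `float_split` (input assumed; see the span-splitting FIXME above):

    // macro output contains the float literal `1.2` where the parser expects
    // field accesses (e.g. from `tup.1.2`); the sink emits
    //   NAME_REF(INT_NUMBER "1") DOT NAME_REF(INT_NUMBER "2")
    // and pushes the literal's single span for each piece.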
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
index fa0125f3e..bd8187a14 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
@@ -7,21 +7,20 @@ use tt::{
Leaf, Punct, Spacing,
};
-use super::syntax_node_to_token_tree;
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
fn check_punct_spacing(fixture: &str) {
let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
- let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax());
+ let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
let mut annotations: HashMap<_, _> = extract_annotations(fixture)
.into_iter()
.map(|(range, annotation)| {
- let token = token_map.token_by_range(range).expect("no token found");
let spacing = match annotation.as_str() {
"Alone" => Spacing::Alone,
"Joint" => Spacing::Joint,
a => panic!("unknown annotation: {a}"),
};
- (token, spacing)
+ (range, spacing)
})
.collect();
@@ -29,8 +28,12 @@ fn check_punct_spacing(fixture: &str) {
let mut cursor = buf.begin();
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
- if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree {
- if let Some(expected) = annotations.remove(span) {
+ if let TokenTreeRef::Leaf(
+ Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
+ _,
+ ) = token_tree
+ {
+ if let Some(expected) = annotations.remove(range) {
assert_eq!(expected, *spacing);
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
index 051e20b3a..00a14f046 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
@@ -3,9 +3,9 @@
use syntax::{SyntaxKind, SyntaxKind::*, T};
-use crate::tt::buffer::TokenBuffer;
+use tt::{buffer::TokenBuffer, Span};
-pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
+pub(crate) fn to_parser_input<S: Span>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
let mut res = parser::Input::default();
let mut current = buffer.begin();
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
index 73a27df5d..7d15812f8 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -2,123 +2,75 @@
use std::hash::Hash;
-use parser::{SyntaxKind, T};
+use stdx::{always, itertools::Itertools};
use syntax::{TextRange, TextSize};
+use tt::Span;
-use crate::syntax_bridge::SyntheticTokenId;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-enum TokenTextRange {
- Token(TextRange),
- Delimiter(TextRange),
+/// Maps absolute text ranges for the corresponding file to the relevant span data.
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct SpanMap<S: Span> {
+ spans: Vec<(TextSize, S)>,
}
-impl TokenTextRange {
- fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
- match self {
- TokenTextRange::Token(it) => Some(it),
- TokenTextRange::Delimiter(it) => match kind {
- T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
- T!['}'] | T![')'] | T![']'] => {
- Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
- }
- _ => None,
- },
- }
+impl<S: Span> SpanMap<S> {
+ /// Creates a new empty [`SpanMap`].
+ pub fn empty() -> Self {
+ Self { spans: Vec::new() }
}
-}
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
-pub struct TokenMap {
- /// Maps `tt::TokenId` to the *relative* source range.
- entries: Vec<(tt::TokenId, TokenTextRange)>,
- pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
-}
-
-impl TokenMap {
- pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
- let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
- TokenTextRange::Token(it) => *it == relative_range,
- TokenTextRange::Delimiter(it) => {
- let open = TextRange::at(it.start(), 1.into());
- let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
- open == relative_range || close == relative_range
- }
- })?;
- Some(token_id)
+ /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
+ /// in order.
+ pub fn finish(&mut self) {
+ always!(
+ self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0),
+ "spans are not in order"
+ );
+ self.spans.shrink_to_fit();
}
- pub fn ranges_by_token(
- &self,
- token_id: tt::TokenId,
- kind: SyntaxKind,
- ) -> impl Iterator<Item = TextRange> + '_ {
- self.entries
- .iter()
- .filter(move |&&(tid, _)| tid == token_id)
- .filter_map(move |(_, range)| range.by_kind(kind))
- }
-
- pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
- self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
- }
-
- pub fn first_range_by_token(
- &self,
- token_id: tt::TokenId,
- kind: SyntaxKind,
- ) -> Option<TextRange> {
- self.ranges_by_token(token_id, kind).next()
- }
-
- pub(crate) fn shrink_to_fit(&mut self) {
- self.entries.shrink_to_fit();
- self.synthetic_entries.shrink_to_fit();
- }
-
- pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
- self.entries.push((token_id, TokenTextRange::Token(relative_range)));
- }
-
- pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
- self.synthetic_entries.push((token_id, id));
- }
-
- pub(crate) fn insert_delim(
- &mut self,
- token_id: tt::TokenId,
- open_relative_range: TextRange,
- close_relative_range: TextRange,
- ) -> usize {
- let res = self.entries.len();
- let cover = open_relative_range.cover(close_relative_range);
-
- self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
- res
+ /// Pushes a new span onto the [`SpanMap`].
+ pub fn push(&mut self, offset: TextSize, span: S) {
+ if cfg!(debug_assertions) {
+ if let Some(&(last_offset, _)) = self.spans.last() {
+ assert!(
+ last_offset < offset,
+ "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
+ );
+ }
+ }
+ self.spans.push((offset, span));
}
- pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
- let (_, token_text_range) = &mut self.entries[idx];
- if let TokenTextRange::Delimiter(dim) = token_text_range {
- let cover = dim.cover(close_relative_range);
- *token_text_range = TokenTextRange::Delimiter(cover);
- }
+ /// Returns all [`TextRange`]s that correspond to the given span.
+ ///
+ /// Note this does a linear search through the entire backing vector.
+ pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
+ // FIXME: This should ignore the syntax context!
+ self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
+ if s != span {
+ return None;
+ }
+ let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
+ Some(TextRange::new(start, end))
+ })
}
- pub(crate) fn remove_delim(&mut self, idx: usize) {
- // FIXME: This could be accidentally quadratic
- self.entries.remove(idx);
+ /// Returns the span at the given position.
+ pub fn span_at(&self, offset: TextSize) -> S {
+ let entry = self.spans.partition_point(|&(it, _)| it <= offset);
+ self.spans[entry].1
}
- pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
- self.entries.iter().filter_map(|&(tid, tr)| match tr {
- TokenTextRange::Token(range) => Some((tid, range)),
- TokenTextRange::Delimiter(_) => None,
- })
+ /// Returns the spans associated with the given range.
+ /// In other words, this will return all spans that correspond to all offsets within the given range.
+ pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ {
+ let (start, end) = (range.start(), range.end());
+ let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
+ let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
+ (&self.spans[start_entry..][..end_entry]).iter().map(|&(_, s)| s)
}
- pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
- self.entries.retain(|&(tid, _)| id(tid));
+ pub fn iter(&self) -> impl Iterator<Item = (TextSize, S)> + '_ {
+ self.spans.iter().copied()
}
}
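
A self-contained sketch of the lookup contract behind `push`/`span_at` (plain `u32`s stand in for real spans; the stored offsets are the *end* offsets of each span's text):

    let mut spans: Vec<(u32, u32)> = Vec::new(); // (end_offset, span)
    spans.push((5, 100)); // text range 0..5 maps to span 100
    spans.push((9, 200)); // text range 5..9 maps to span 200
    let offset = 5;
    let idx = spans.partition_point(|&(end, _)| end <= offset);
    assert_eq!(spans[idx].1, 200); // first entry whose end offset exceeds `offset`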
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index 79ff8ca28..40e8a2385 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -3,16 +3,17 @@
use smallvec::{smallvec, SmallVec};
use syntax::SyntaxKind;
+use tt::Span;
-use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult};
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
#[derive(Debug, Clone)]
-pub(crate) struct TtIter<'a> {
- pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
+pub(crate) struct TtIter<'a, S> {
+ pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
}
-impl<'a> TtIter<'a> {
- pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
+impl<'a, S: Span> TtIter<'a, S> {
+ pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
TtIter { inner: subtree.token_trees.iter() }
}
@@ -36,35 +37,35 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
+ pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree<S>, ()> {
match self.next() {
Some(tt::TokenTree::Subtree(it)) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
+ pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
match self.next() {
Some(tt::TokenTree::Leaf(it)) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
+ pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> {
+ pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
+ pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
let it = self.expect_leaf()?;
match it {
tt::Leaf::Literal(_) => Ok(it),
@@ -73,7 +74,7 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> {
+ pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Punct(it) => Ok(it),
_ => Err(()),
@@ -84,7 +85,7 @@ impl<'a> TtIter<'a> {
///
/// This method currently may return a single quote, which is part of a lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
- pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct; 3]>, ()> {
+ pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct<S>; 3]>, ()> {
let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
return Err(());
};
@@ -126,11 +127,10 @@ impl<'a> TtIter<'a> {
pub(crate) fn expect_fragment(
&mut self,
entry_point: parser::PrefixEntryPoint,
- ) -> ExpandResult<Option<tt::TokenTree>> {
+ ) -> ExpandResult<Option<tt::TokenTree<S>>> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input);
-
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
@@ -162,32 +162,30 @@ impl<'a> TtIter<'a> {
let mut curr = buffer.begin();
let mut res = vec![];
- if cursor.is_root() {
- while curr != cursor {
- let Some(token) = curr.token_tree() else { break };
- res.push(token.cloned());
- curr = curr.bump();
- }
+ while curr != cursor {
+ let Some(token) = curr.token_tree() else { break };
+ res.push(token.cloned());
+ curr = curr.bump();
}
self.inner = self.inner.as_slice()[res.len()..].iter();
let res = match res.len() {
0 | 1 => res.pop(),
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: res,
})),
};
ExpandResult { value: res, err }
}
- pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> {
+ pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
self.inner.as_slice().get(n)
}
}
-impl<'a> Iterator for TtIter<'a> {
- type Item = &'a tt::TokenTree;
+impl<'a, S> Iterator for TtIter<'a, S> {
+ type Item = &'a tt::TokenTree<S>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next()
}
@@ -197,4 +195,4 @@ impl<'a> Iterator for TtIter<'a> {
}
}
-impl std::iter::ExactSizeIterator for TtIter<'_> {}
+impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}
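
For orientation, the generified iterator is consumed by the parser changes earlier in this patch roughly as follows (a simplified sketch, not the crate's exact code):

    fn parse_metavar_call_sketch<S: Span>(src: &mut TtIter<'_, S>) -> Result<(), ()> {
        let _func = src.expect_ident()?;   // e.g. `count` in `${count(x)}`
        let _args = src.expect_subtree()?; // the parenthesized argument list
        Ok(())
    }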