summaryrefslogtreecommitdiffstats
path: root/src/tools/rust-analyzer/crates/mbe
diff options
context:
space:
mode:
Diffstat (limited to 'src/tools/rust-analyzer/crates/mbe')
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs15
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs5
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs34
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs128
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs49
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/parser.rs36
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs74
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs7
8 files changed, 253 insertions, 95 deletions
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 894355fcb..d28dd17de 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -20,7 +20,10 @@ fn benchmark_parse_macro_rules() {
let rules = macro_rules_fixtures_tt();
let hash: usize = {
let _pt = bench("mbe parse macro rules");
- rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it).unwrap().rules.len()).sum()
+ rules
+ .values()
+ .map(|it| DeclarativeMacro::parse_macro_rules(it, true).unwrap().rules.len())
+ .sum()
};
assert_eq!(hash, 1144);
}
@@ -50,7 +53,7 @@ fn benchmark_expand_macro_rules() {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
macro_rules_fixtures_tt()
.into_iter()
- .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt).unwrap()))
+ .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true).unwrap()))
.collect()
}
@@ -76,7 +79,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
let mut res = Vec::new();
for (name, it) in rules {
- for rule in &it.rules {
+ for rule in it.rules.iter() {
// Generate twice
for _ in 0..2 {
// The input are generated by filling the `Op` randomly.
@@ -108,7 +111,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
}
try_cnt += 1;
if try_cnt > 100 {
- panic!("invocaton fixture {name} cannot be generated.\n");
+ panic!("invocation fixture {name} cannot be generated.\n");
}
}
}
@@ -192,10 +195,10 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
});
parent.token_trees.push(subtree.into());
}
- Op::Ignore { .. } | Op::Index { .. } => {}
+ Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } => {}
};
- // Simple linear congruential generator for determistic result
+ // Simple linear congruential generator for deterministic result
fn rand(seed: &mut usize) -> usize {
let a = 1664525;
let c = 1013904223;
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index 7537dc322..8e2181e97 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -13,10 +13,11 @@ use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
pub(crate) fn expand_rules(
rules: &[crate::Rule],
input: &tt::Subtree,
+ is_2021: bool,
) -> ExpandResult<tt::Subtree> {
let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
for rule in rules {
- let new_match = matcher::match_(&rule.lhs, input);
+ let new_match = matcher::match_(&rule.lhs, input, is_2021);
if new_match.err.is_none() {
// If we find a rule that applies without errors, we're done.
@@ -45,7 +46,7 @@ pub(crate) fn expand_rules(
transcriber::transcribe(&rule.rhs, &match_.bindings);
ExpandResult { value, err: match_.err.or(transcribe_err) }
} else {
- ExpandResult::with_err(
+ ExpandResult::new(
tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
ExpandError::NoMatchingRule,
)
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index f4ea9e5c8..474826079 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -111,8 +111,8 @@ impl Match {
}
/// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
- let mut res = match_loop(pattern, input);
+pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match {
+ let mut res = match_loop(pattern, input, is_2021);
res.bound_count = count(res.bindings.bindings());
return res;
@@ -332,7 +332,7 @@ struct MatchState<'t> {
/// Cached result of meta variable parsing
meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
- /// Is error occuried in this state, will `poised` to "parent"
+ /// Whether an error occurred in this state; if so, it will be "poisoned" to the parent
is_error: bool,
}
@@ -354,6 +354,7 @@ struct MatchState<'t> {
/// - `eof_items`: the set of items that would be valid if this was the EOF.
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `error_items`: the set of items in errors, used for error-resilient parsing
+#[inline]
fn match_loop_inner<'t>(
src: TtIter<'t>,
stack: &[TtIter<'t>],
@@ -364,6 +365,7 @@ fn match_loop_inner<'t>(
next_items: &mut Vec<MatchState<'t>>,
eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+ is_2021: bool,
) {
macro_rules! try_push {
($items: expr, $it:expr) => {
@@ -474,7 +476,7 @@ fn match_loop_inner<'t>(
OpDelimited::Op(Op::Var { kind, name, .. }) => {
if let &Some(kind) = kind {
let mut fork = src.clone();
- let match_res = match_meta_var(kind, &mut fork);
+ let match_res = match_meta_var(kind, &mut fork, is_2021);
match match_res.err {
None => {
// Some meta variables are optional (e.g. vis)
@@ -565,7 +567,9 @@ fn match_loop_inner<'t>(
item.is_error = true;
error_items.push(item);
}
- OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. }) => {}
+ OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. }) => {
+ stdx::never!("metavariable expression in lhs found");
+ }
OpDelimited::Open => {
if matches!(src.peek_n(0), Some(tt::TokenTree::Subtree(..))) {
item.dot.next();
@@ -583,7 +587,7 @@ fn match_loop_inner<'t>(
}
}
-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
+fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match {
let mut src = TtIter::new(src);
let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
let mut res = Match::default();
@@ -622,6 +626,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
&mut next_items,
&mut eof_items,
&mut error_items,
+ is_2021,
);
stdx::always!(cur_items.is_empty());
@@ -731,14 +736,17 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
}
}
-fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult<Option<Fragment>> {
+fn match_meta_var(
+ kind: MetaVarKind,
+ input: &mut TtIter<'_>,
+ is_2021: bool,
+) -> ExpandResult<Option<Fragment>> {
let fragment = match kind {
MetaVarKind::Path => parser::PrefixEntryPoint::Path,
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
- // FIXME: These two should actually behave differently depending on the edition.
- //
- // https://doc.rust-lang.org/edition-guide/rust-2021/or-patterns-macro-rules.html
- MetaVarKind::Pat | MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
+ MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
+ MetaVarKind::Pat => parser::PrefixEntryPoint::Pat,
+ MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat,
MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt,
MetaVarKind::Block => parser::PrefixEntryPoint::Block,
MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem,
@@ -805,7 +813,9 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
Op::Var { name, .. } => collector_fun(name.clone()),
Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens),
Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens),
- Op::Ignore { .. } | Op::Index { .. } | Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => {
+ Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => {}
+ Op::Ignore { .. } | Op::Index { .. } | Op::Count { .. } => {
+ stdx::never!("metavariable expression in lhs found");
}
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index dffb40d4b..6161af185 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -7,7 +7,7 @@ use crate::{
expander::{Binding, Bindings, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
tt::{self, Delimiter},
- ExpandError, ExpandResult, MetaTemplate,
+ CountError, ExpandError, ExpandResult, MetaTemplate,
};
impl Bindings {
@@ -15,13 +15,23 @@ impl Bindings {
self.inner.contains_key(name)
}
- fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<Fragment, ExpandError> {
+ fn get(&self, name: &str) -> Result<&Binding, ExpandError> {
+ match self.inner.get(name) {
+ Some(binding) => Ok(binding),
+ None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))),
+ }
+ }
+
+ fn get_fragment(
+ &self,
+ name: &str,
+ nesting: &mut [NestingState],
+ ) -> Result<Fragment, ExpandError> {
macro_rules! binding_err {
($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
}
- let mut b: &Binding =
- self.inner.get(name).ok_or_else(|| binding_err!("could not find binding `{name}`"))?;
+ let mut b = self.get(name)?;
for nesting_state in nesting.iter_mut() {
nesting_state.hit = true;
b = match b {
@@ -133,7 +143,7 @@ fn expand_subtree(
// remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
let start_elements = arena.len();
let mut err = None;
- for op in template.iter() {
+ 'ops: for op in template.iter() {
match op {
Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()),
Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()),
@@ -161,13 +171,12 @@ fn expand_subtree(
}
Op::Ignore { name, id } => {
// Expand the variable, but ignore the result. This registers the repetition count.
+ // FIXME: Any emitted errors are dropped.
expand_var(ctx, name, *id);
}
Op::Index { depth } => {
- let index = ctx
- .nesting
- .get(ctx.nesting.len() - 1 - (*depth as usize))
- .map_or(0, |nest| nest.idx);
+ let index =
+ ctx.nesting.get(ctx.nesting.len() - 1 - depth).map_or(0, |nest| nest.idx);
arena.push(
tt::Leaf::Literal(tt::Literal {
text: index.to_string().into(),
@@ -176,6 +185,65 @@ fn expand_subtree(
.into(),
);
}
+ Op::Count { name, depth } => {
+ let mut binding = match ctx.bindings.get(name.as_str()) {
+ Ok(b) => b,
+ Err(e) => {
+ if err.is_none() {
+ err = Some(e);
+ }
+ continue;
+ }
+ };
+ for state in ctx.nesting.iter_mut() {
+ state.hit = true;
+ match binding {
+ Binding::Fragment(_) | Binding::Missing(_) => {
+ // `count()` will report an error.
+ break;
+ }
+ Binding::Nested(bs) => {
+ if let Some(b) = bs.get(state.idx) {
+ binding = b;
+ } else {
+ state.at_end = true;
+ continue 'ops;
+ }
+ }
+ Binding::Empty => {
+ state.at_end = true;
+ // FIXME: Breaking here and proceeding to `count()` isn't the most
+ // correct thing to do here. This could be a binding of some named
+ // fragment which we don't know the depth of, so `count()` will just
+ // return 0 for this no matter what `depth` is. See test
+ // `count_interaction_with_empty_binding` for example.
+ break;
+ }
+ }
+ }
+
+ let c = match count(ctx, binding, 0, *depth) {
+ Ok(c) => c,
+ Err(e) => {
+ // XXX: It *might* make sense to emit a dummy integer value like `0` here.
+ // That would make type inference a bit more robust in cases like
+ // `v[${count(t)}]` where the index doesn't matter, but could also lead to
+ // wrong inference for cases like `tup.${count(t)}` where the index itself
+ // does matter.
+ if err.is_none() {
+ err = Some(e.into());
+ }
+ continue;
+ }
+ };
+ arena.push(
+ tt::Leaf::Literal(tt::Literal {
+ text: c.to_string().into(),
+ span: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
+ }
}
}
// drain the elements added in this instance of expand_subtree
@@ -218,12 +286,9 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe
.into();
ExpandResult::ok(Fragment::Tokens(tt))
} else {
- ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
+ ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else(
|e| ExpandResult {
- value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: vec![],
- })),
+ value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
err: Some(e),
},
ExpandResult::ok,
@@ -245,6 +310,7 @@ fn expand_repeat(
let limit = 65536;
let mut has_seps = 0;
let mut counter = 0;
+ let mut err = None;
loop {
let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
@@ -272,6 +338,7 @@ fn expand_repeat(
}
if e.is_some() {
+ err = err.or(e);
continue;
}
@@ -317,7 +384,7 @@ fn expand_repeat(
err: Some(ExpandError::UnexpectedToken),
};
}
- ExpandResult::ok(Fragment::Tokens(tt))
+ ExpandResult { value: Fragment::Tokens(tt), err }
}
fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
@@ -343,3 +410,34 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
_ => buf.push(tt.into()),
}
}
+
+/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
+/// defined by the metavar expression.
+fn count(
+ ctx: &ExpandCtx<'_>,
+ binding: &Binding,
+ our_depth: usize,
+ count_depth: Option<usize>,
+) -> Result<usize, CountError> {
+ match binding {
+ Binding::Nested(bs) => match count_depth {
+ None => bs.iter().map(|b| count(ctx, b, our_depth + 1, None)).sum(),
+ Some(0) => Ok(bs.len()),
+ Some(d) => bs.iter().map(|b| count(ctx, b, our_depth + 1, Some(d - 1))).sum(),
+ },
+ Binding::Empty => Ok(0),
+ Binding::Fragment(_) | Binding::Missing(_) => {
+ if our_depth == 0 {
+ // `${count(t)}` is placed inside the innermost repetition. This includes cases
+ // where `t` is not a repeated fragment.
+ Err(CountError::Misplaced)
+ } else if count_depth.is_none() {
+ Ok(1)
+ } else {
+ // We've reached the innermost repeated fragment, but the user wants us to go
+ // further!
+ Err(CountError::OutOfBounds)
+ }
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index ac107a0d6..5ef20ff8a 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -19,6 +19,7 @@ mod benchmark;
mod token_map;
use ::tt::token_id as tt;
+use stdx::impl_from;
use std::fmt;
@@ -69,7 +70,7 @@ impl fmt::Display for ParseError {
}
}
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
BindingError(Box<Box<str>>),
LeftoverTokens,
@@ -77,8 +78,11 @@ pub enum ExpandError {
LimitExceeded,
NoMatchingRule,
UnexpectedToken,
+ CountError(CountError),
}
+impl_from!(CountError for ExpandError);
+
impl ExpandError {
fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
ExpandError::BindingError(Box::new(e.into()))
@@ -94,6 +98,23 @@ impl fmt::Display for ExpandError {
ExpandError::ConversionError => f.write_str("could not convert tokens"),
ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
+ ExpandError::CountError(e) => e.fmt(f),
+ }
+ }
+}
+
+// FIXME: Showing these errors could be nicer.
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub enum CountError {
+ OutOfBounds,
+ Misplaced,
+}
+
+impl fmt::Display for CountError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ CountError::OutOfBounds => f.write_str("${count} out of bounds"),
+ CountError::Misplaced => f.write_str("${count} misplaced"),
}
}
}
@@ -104,9 +125,12 @@ impl fmt::Display for ExpandError {
/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DeclarativeMacro {
- rules: Vec<Rule>,
+ rules: Box<[Rule]>,
/// Highest id of the token we have in TokenMap
shift: Shift,
+ // This is used for correctly determining the behavior of the pat fragment
+ // FIXME: This should be tracked by hygiene of the fragment identifier!
+ is_2021: bool,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -190,7 +214,10 @@ pub enum Origin {
impl DeclarativeMacro {
/// The old, `macro_rules! m {}` flavor.
- pub fn parse_macro_rules(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
+ pub fn parse_macro_rules(
+ tt: &tt::Subtree,
+ is_2021: bool,
+ ) -> Result<DeclarativeMacro, ParseError> {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
@@ -211,11 +238,11 @@ impl DeclarativeMacro {
validate(lhs)?;
}
- Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
+ Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
}
/// The new, unstable `macro m {}` flavor.
- pub fn parse_macro2(tt: &tt::Subtree) -> Result<DeclarativeMacro, ParseError> {
+ pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> Result<DeclarativeMacro, ParseError> {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
@@ -244,14 +271,14 @@ impl DeclarativeMacro {
validate(lhs)?;
}
- Ok(DeclarativeMacro { rules, shift: Shift::new(tt) })
+ Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
}
pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
// apply shift
let mut tt = tt.clone();
self.shift.shift_all(&mut tt);
- expander::expand_rules(&self.rules, &tt)
+ expander::expand_rules(&self.rules, &tt, self.is_2021)
}
pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
@@ -324,12 +351,12 @@ pub struct ValueResult<T, E> {
}
impl<T, E> ValueResult<T, E> {
- pub fn ok(value: T) -> Self {
- Self { value, err: None }
+ pub fn new(value: T, err: E) -> Self {
+ Self { value, err: Some(err) }
}
- pub fn with_err(value: T, err: E) -> Self {
- Self { value, err: Some(err) }
+ pub fn ok(value: T) -> Self {
+ Self { value, err: None }
}
pub fn only_err(err: E) -> Self
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
index fd3d64719..7a143e746 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -20,7 +20,7 @@ use crate::{tt, tt_iter::TtIter, ParseError};
/// Stuff to the right is a [`MetaTemplate`] template which is used to produce
/// output.
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct MetaTemplate(pub(crate) Vec<Op>);
+pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
impl MetaTemplate {
pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
@@ -44,7 +44,7 @@ impl MetaTemplate {
res.push(op);
}
- Ok(MetaTemplate(res))
+ Ok(MetaTemplate(res.into_boxed_slice()))
}
}
@@ -52,7 +52,8 @@ impl MetaTemplate {
pub(crate) enum Op {
Var { name: SmolStr, kind: Option<MetaVarKind>, id: tt::TokenId },
Ignore { name: SmolStr, id: tt::TokenId },
- Index { depth: u32 },
+ Index { depth: usize },
+ Count { name: SmolStr, depth: Option<usize> },
Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter },
Literal(tt::Literal),
@@ -295,9 +296,13 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
let ident = args.expect_ident()?;
Op::Ignore { name: ident.text.clone(), id: ident.span }
}
- "index" => {
- let depth = if args.len() == 0 { 0 } else { args.expect_u32_literal()? };
- Op::Index { depth }
+ "index" => Op::Index { depth: parse_depth(&mut args)? },
+ "count" => {
+ let ident = args.expect_ident()?;
+ // `${count(t)}` and `${count(t,)}` have different meanings. Not sure if this is a bug
+ // but that's how it's implemented in rustc as of this writing. See rust-lang/rust#111904.
+ let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None };
+ Op::Count { name: ident.text.clone(), depth }
}
_ => return Err(()),
};
@@ -308,3 +313,22 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
Ok(op)
}
+
+fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
+ if src.len() == 0 {
+ Ok(0)
+ } else if let tt::Leaf::Literal(lit) = src.expect_literal()? {
+ // Suffixes are not allowed.
+ lit.text.parse().map_err(|_| ())
+ } else {
+ Err(())
+ }
+}
+
+fn try_eat_comma(src: &mut TtIter<'_>) -> bool {
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) {
+ let _ = src.next();
+ return true;
+ }
+ false
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index fb5313401..8cbf0f8fc 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -190,20 +190,13 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
let kind = token.kind(conv);
if kind == COMMENT {
- if let Some(tokens) = conv.convert_doc_comment(&token) {
- // FIXME: There has to be a better way to do this
- // Add the comments token id to the converted doc string
+ // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
+ // figure out which token id to use for the doc comment, if it is converted successfully.
+ let next_id = conv.id_alloc().peek_next_id();
+ if let Some(tokens) = conv.convert_doc_comment(&token, next_id) {
let id = conv.id_alloc().alloc(range, synth_id);
- result.extend(tokens.into_iter().map(|mut tt| {
- if let tt::TokenTree::Subtree(sub) = &mut tt {
- if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
- sub.token_trees.get_mut(2)
- {
- lit.span = id
- }
- }
- tt
- }));
+ debug_assert_eq!(id, next_id);
+ result.extend(tokens);
}
continue;
}
@@ -382,49 +375,46 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
text.into()
}
-fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+fn convert_doc_comment(
+ token: &syntax::SyntaxToken,
+ span: tt::TokenId,
+) -> Option<Vec<tt::TokenTree>> {
cov_mark::hit!(test_meta_doc_comments);
let comment = ast::Comment::cast(token.clone())?;
let doc = comment.kind().doc?;
// Make `doc="\" Comments\""
- let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
+ let meta_tkns =
+ vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)];
// Make `#![]`
let mut token_trees = Vec::with_capacity(3);
- token_trees.push(mk_punct('#'));
+ token_trees.push(mk_punct('#', span));
if let ast::CommentPlacement::Inner = doc {
- token_trees.push(mk_punct('!'));
+ token_trees.push(mk_punct('!', span));
}
token_trees.push(tt::TokenTree::from(tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- kind: tt::DelimiterKind::Bracket,
- },
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
token_trees: meta_tkns,
}));
return Some(token_trees);
// Helper functions
- fn mk_ident(s: &str) -> tt::TokenTree {
- tt::TokenTree::from(tt::Leaf::from(tt::Ident {
- text: s.into(),
- span: tt::TokenId::unspecified(),
- }))
+ fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }))
}
- fn mk_punct(c: char) -> tt::TokenTree {
+ fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree {
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
char: c,
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span,
}))
}
- fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
- let lit = tt::Literal { text: doc_comment_text(comment), span: tt::TokenId::unspecified() };
+ fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree {
+ let lit = tt::Literal { text: doc_comment_text(comment), span };
tt::TokenTree::from(tt::Leaf::from(lit))
}
@@ -480,6 +470,10 @@ impl TokenIdAlloc {
}
}
}
+
+ fn peek_next_id(&self) -> tt::TokenId {
+ tt::TokenId(self.next_id)
+ }
}
/// A raw token (straight from lexer) converter
@@ -502,7 +496,11 @@ trait SrcToken<Ctx>: std::fmt::Debug {
trait TokenConverter: Sized {
type Token: SrcToken<Self>;
- fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+ fn convert_doc_comment(
+ &self,
+ token: &Self::Token,
+ span: tt::TokenId,
+ ) -> Option<Vec<tt::TokenTree>>;
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
@@ -532,9 +530,9 @@ impl<'a> SrcToken<RawConverter<'a>> for usize {
impl<'a> TokenConverter for RawConverter<'a> {
type Token = usize;
- fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+ fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
let text = self.lexed.text(token);
- convert_doc_comment(&doc_comment(text))
+ convert_doc_comment(&doc_comment(text), span)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -681,8 +679,12 @@ impl SrcToken<Converter> for SynToken {
impl TokenConverter for Converter {
type Token = SynToken;
- fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
- convert_doc_comment(token.token()?)
+ fn convert_doc_comment(
+ &self,
+ token: &Self::Token,
+ span: tt::TokenId,
+ ) -> Option<Vec<tt::TokenTree>> {
+ convert_doc_comment(token.token()?, span)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index f744481f3..59dbf1568 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -73,13 +73,6 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_u32_literal(&mut self) -> Result<u32, ()> {
- match self.expect_literal()? {
- tt::Leaf::Literal(lit) => lit.text.parse().map_err(drop),
- _ => Err(()),
- }
- }
-
pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> {
match self.expect_leaf()? {
tt::Leaf::Punct(it) => Ok(it),