author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:35 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-30 03:59:35 +0000
commit     d1b2d29528b7794b41e66fc2136e395a02f8529b (patch)
tree       a4a17504b260206dec3cf55b2dca82929a348ac2 /compiler/rustc_expand/src/mbe
parent     Releasing progress-linux version 1.72.1+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.73.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_expand/src/mbe')
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs   |  5
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_check.rs   |  4
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs  | 16
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs   | 56
-rw-r--r--  compiler/rustc_expand/src/mbe/metavar_expr.rs  | 23
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs        | 44
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs    | 15
7 files changed, 88 insertions, 75 deletions
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index 3593bed2d..e06037564 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -42,7 +42,8 @@ pub(super) fn failed_to_match_macro<'cx>(
return result;
}
- let Some(BestFailure { token, msg: label, remaining_matcher, .. }) = tracker.best_failure else {
+ let Some(BestFailure { token, msg: label, remaining_matcher, .. }) = tracker.best_failure
+ else {
return DummyResult::any(sp);
};
@@ -256,7 +257,7 @@ pub(super) fn emit_frag_parse_err(
e.span_suggestion_verbose(
site_span,
"surround the macro invocation with `{}` to interpret the expansion as a statement",
- format!("{{ {}; }}", snippet),
+ format!("{{ {snippet}; }}"),
Applicability::MaybeIncorrect,
);
}
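
Both hunks above apply the inlined format args idiom (stable since Rust 1.58): an identifier in scope is captured directly inside the format string instead of being passed as a positional argument. A minimal stand-alone sketch of the pattern, with an illustrative variable name rather than anything taken from the patch context:

    fn main() {
        let snippet = "m!()";
        // Positional argument, as in the removed line.
        let old = format!("{{ {}; }}", snippet);
        // Captured identifier, as in the added line.
        let new = format!("{{ {snippet}; }}");
        assert_eq!(old, new);
        println!("{new}");
    }
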
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index 34f998274..95f5bb2d2 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -593,7 +593,7 @@ fn check_ops_is_prefix(
return;
}
}
- buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{}`", name));
+ buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{name}`"));
}
/// Returns whether `binder_ops` is a prefix of `occurrence_ops`.
@@ -626,7 +626,7 @@ fn ops_is_prefix(
if i >= occurrence_ops.len() {
let mut span = MultiSpan::from_span(span);
span.push_span_label(binder.span, "expected repetition");
- let message = format!("variable '{}' is still repeating at this depth", name);
+ let message = format!("variable '{name}' is still repeating at this depth");
buffer_lint(sess, span, node_id, message);
return;
}
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index f0e67cfd5..7e85beaad 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -81,7 +81,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::ErrorGuaranteed;
use rustc_lint_defs::pluralize;
-use rustc_parse::parser::{NtOrTt, Parser};
+use rustc_parse::parser::{ParseNtResult, Parser};
use rustc_span::symbol::Ident;
use rustc_span::symbol::MacroRulesNormalizedIdent;
use rustc_span::Span;
@@ -156,7 +156,7 @@ impl Display for MatcherLoc {
MatcherLoc::MetaVarDecl { bind, kind, .. } => {
write!(f, "meta-variable `${bind}")?;
if let Some(kind) = kind {
- write!(f, ":{}", kind)?;
+ write!(f, ":{kind}")?;
}
write!(f, "`")?;
Ok(())
@@ -692,8 +692,8 @@ impl TtParser {
Ok(nt) => nt,
};
let m = match nt {
- NtOrTt::Nt(nt) => MatchedNonterminal(Lrc::new(nt)),
- NtOrTt::Tt(tt) => MatchedTokenTree(tt),
+ ParseNtResult::Nt(nt) => MatchedNonterminal(Lrc::new(nt)),
+ ParseNtResult::Tt(tt) => MatchedTokenTree(tt),
};
mp.push_match(next_metavar, seq_depth, m);
mp.idx += 1;
@@ -723,7 +723,7 @@ impl TtParser {
.iter()
.map(|mp| match &matcher[mp.idx] {
MatcherLoc::MetaVarDecl { bind, kind: Some(kind), .. } => {
- format!("{} ('{}')", kind, bind)
+ format!("{kind} ('{bind}')")
}
_ => unreachable!(),
})
@@ -736,8 +736,8 @@ impl TtParser {
"local ambiguity when calling macro `{}`: multiple parsing options: {}",
self.macro_name,
match self.next_mps.len() {
- 0 => format!("built-in NTs {}.", nts),
- n => format!("built-in NTs {} or {n} other option{s}.", nts, s = pluralize!(n)),
+ 0 => format!("built-in NTs {nts}."),
+ n => format!("built-in NTs {nts} or {n} other option{s}.", s = pluralize!(n)),
}
),
)
@@ -757,7 +757,7 @@ impl TtParser {
match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
Vacant(spot) => spot.insert(res.next().unwrap()),
Occupied(..) => {
- return Error(span, format!("duplicated bind name: {}", bind));
+ return Error(span, format!("duplicated bind name: {bind}"));
}
};
} else {
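
The ambiguity-message hunk above also shows that captured identifiers (`{nts}`, `{n}`) can be mixed with an explicitly bound named argument (`s = pluralize!(n)`) in the same format string. A hedged, self-contained sketch with a plain helper standing in for rustc's pluralize! macro:

    fn pluralize(n: usize) -> &'static str {
        if n == 1 { "" } else { "s" }
    }

    fn main() {
        let nts = "ident ('e')";
        for n in [0usize, 1, 3] {
            let msg = match n {
                // `{nts}` and `{n}` are captured; `{s}` is an explicit named argument.
                0 => format!("built-in NTs {nts}."),
                n => format!("built-in NTs {nts} or {n} other option{s}.", s = pluralize(n)),
            };
            println!("{msg}");
        }
    }
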
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 42cc0a6b1..a5959d68f 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -16,6 +16,7 @@ use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{Applicability, ErrorGuaranteed};
+use rustc_feature::Features;
use rustc_lint_defs::builtin::{
RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
@@ -249,7 +250,7 @@ fn expand_macro<'cx>(
trace_macros_note(&mut cx.expansions, sp, msg);
}
- let p = Parser::new(sess, tts, false, None);
+ let p = Parser::new(sess, tts, None);
if is_local {
cx.resolver.record_macro_rule_usage(node_id, i);
@@ -257,7 +258,7 @@ fn expand_macro<'cx>(
// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
- return Box::new(ParserAnyMacro {
+ Box::new(ParserAnyMacro {
parser: p,
// Pass along the original expansion site and the name of the macro
@@ -269,18 +270,17 @@ fn expand_macro<'cx>(
is_trailing_mac: cx.current_expansion.is_trailing_mac,
arm_span,
is_local,
- });
+ })
}
Err(CanRetry::No(_)) => {
debug!("Will not retry matching as an error was emitted already");
- return DummyResult::any(sp);
+ DummyResult::any(sp)
}
Err(CanRetry::Yes) => {
- // Retry and emit a better error below.
+ // Retry and emit a better error.
+ diagnostics::failed_to_match_macro(cx, sp, def_span, name, arg, lhses)
}
}
-
- diagnostics::failed_to_match_macro(cx, sp, def_span, name, arg, lhses)
}
pub(super) enum CanRetry {
@@ -376,6 +376,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
/// Converts a macro item into a syntax extension.
pub fn compile_declarative_macro(
sess: &Session,
+ features: &Features,
def: &ast::Item,
edition: Edition,
) -> (SyntaxExtension, Vec<(usize, Span)>) {
@@ -383,6 +384,7 @@ pub fn compile_declarative_macro(
let mk_syn_ext = |expander| {
SyntaxExtension::new(
sess,
+ features,
SyntaxExtensionKind::LegacyBang(expander),
def.span,
Vec::new(),
@@ -447,7 +449,7 @@ pub fn compile_declarative_macro(
let create_parser = || {
let body = macro_def.body.tokens.clone();
- Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS)
+ Parser::new(&sess.parse_sess, body, rustc_parse::MACRO_ARGUMENTS)
};
let parser = create_parser();
@@ -457,8 +459,8 @@ pub fn compile_declarative_macro(
match tt_parser.parse_tt(&mut Cow::Owned(parser), &argument_gram, &mut NoopTracker) {
Success(m) => m,
Failure(()) => {
- // The fast `NoopTracker` doesn't have any info on failure, so we need to retry it with another one
- // that gives us the information we need.
+ // The fast `NoopTracker` doesn't have any info on failure, so we need to retry it
+ // with another one that gives us the information we need.
// For this we need to reclone the macro body as the previous parser consumed it.
let retry_parser = create_parser();
@@ -500,11 +502,11 @@ pub fn compile_declarative_macro(
.map(|m| {
if let MatchedTokenTree(tt) = m {
let tt = mbe::quoted::parse(
- TokenStream::new(vec![tt.clone()]),
+ &TokenStream::new(vec![tt.clone()]),
true,
&sess.parse_sess,
def.id,
- sess.features_untracked(),
+ features,
edition,
)
.pop()
@@ -524,11 +526,11 @@ pub fn compile_declarative_macro(
.map(|m| {
if let MatchedTokenTree(tt) = m {
return mbe::quoted::parse(
- TokenStream::new(vec![tt.clone()]),
+ &TokenStream::new(vec![tt.clone()]),
false,
&sess.parse_sess,
def.id,
- sess.features_untracked(),
+ features,
edition,
)
.pop()
@@ -554,7 +556,7 @@ pub fn compile_declarative_macro(
let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
match transparency_error {
Some(TransparencyError::UnknownTransparency(value, span)) => {
- diag.span_err(span, format!("unknown macro transparency: `{}`", value));
+ diag.span_err(span, format!("unknown macro transparency: `{value}`"));
}
Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) => {
diag.span_err(vec![old_span, new_span], "multiple macro transparency attributes");
@@ -1197,10 +1199,10 @@ fn check_matcher_core<'tt>(
may_be = may_be
),
);
- err.span_label(sp, format!("not allowed after `{}` fragments", kind));
+ err.span_label(sp, format!("not allowed after `{kind}` fragments"));
if kind == NonterminalKind::PatWithOr
- && sess.edition.rust_2021()
+ && sess.edition.at_least_rust_2021()
&& next_token.is_token(&BinOp(token::BinOpToken::Or))
{
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(
@@ -1221,8 +1223,7 @@ fn check_matcher_core<'tt>(
&[] => {}
&[t] => {
err.note(format!(
- "only {} is allowed after `{}` fragments",
- t, kind,
+ "only {t} is allowed after `{kind}` fragments",
));
}
ts => {
@@ -1327,7 +1328,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
_ => IsInFollow::No(TOKENS),
}
}
- NonterminalKind::PatWithOr { .. } => {
+ NonterminalKind::PatWithOr => {
const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
match tok {
TokenTree::Token(token) => match token.kind {
@@ -1407,9 +1408,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
match tt {
mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(),
- mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
- mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
- mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
+ mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
+ mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"),
+ mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"),
_ => panic!(
"{}",
"unexpected mbe::TokenTree::{Sequence or Delimited} \
@@ -1418,6 +1419,11 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
}
}
-pub(super) fn parser_from_cx(sess: &ParseSess, tts: TokenStream, recovery: Recovery) -> Parser<'_> {
- Parser::new(sess, tts, true, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
+pub(super) fn parser_from_cx(
+ sess: &ParseSess,
+ mut tts: TokenStream,
+ recovery: Recovery,
+) -> Parser<'_> {
+ tts.desugar_doc_comments();
+ Parser::new(sess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
}
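
The expand_macro hunks above drop the per-arm `return` statements so the `match` itself becomes the function's tail expression, with `failed_to_match_macro` moving into the `Err(CanRetry::Yes)` arm. A toy sketch of the same restructuring, using simplified stand-in names rather than the rustc-internal types:

    enum Outcome {
        Matched(usize),
        NoRetry,
        Retry,
    }

    // Every arm now yields a value; there is no trailing fallback call after the match.
    fn expand(outcome: Outcome) -> String {
        match outcome {
            Outcome::Matched(i) => format!("expanding rule {i}"),
            Outcome::NoRetry => String::from("an error was already emitted"),
            Outcome::Retry => String::from("retrying to produce a better error"),
        }
    }

    fn main() {
        println!("{}", expand(Outcome::Retry));
    }
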
diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs
index 6e9196150..7c37aadc6 100644
--- a/compiler/rustc_expand/src/mbe/metavar_expr.rs
+++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -93,7 +93,17 @@ fn parse_count<'sess>(
span: Span,
) -> PResult<'sess, MetaVarExpr> {
let ident = parse_ident(iter, sess, span)?;
- let depth = if try_eat_comma(iter) { Some(parse_depth(iter, sess, span)?) } else { None };
+ let depth = if try_eat_comma(iter) {
+ if iter.look_ahead(0).is_none() {
+ return Err(sess.span_diagnostic.struct_span_err(
+ span,
+ "`count` followed by a comma must have an associated index indicating its depth",
+ ));
+ }
+ Some(parse_depth(iter, sess, span)?)
+ } else {
+ None
+ };
Ok(MetaVarExpr::Count(ident, depth))
}
@@ -104,13 +114,10 @@ fn parse_depth<'sess>(
span: Span,
) -> PResult<'sess, usize> {
let Some(tt) = iter.next() else { return Ok(0) };
- let TokenTree::Token(token::Token {
- kind: token::TokenKind::Literal(lit), ..
- }, _) = tt else {
- return Err(sess.span_diagnostic.struct_span_err(
- span,
- "meta-variable expression depth must be a literal"
- ));
+ let TokenTree::Token(token::Token { kind: token::TokenKind::Literal(lit), .. }, _) = tt else {
+ return Err(sess
+ .span_diagnostic
+ .struct_span_err(span, "meta-variable expression depth must be a literal"));
};
if let Ok(lit_kind) = LitKind::from_token_lit(*lit)
&& let LitKind::Int(n_u128, LitIntType::Unsuffixed) = lit_kind
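
The parse_count hunk above rejects a trailing comma with no depth (e.g. `${count(x,)}`) instead of letting it fall through to the default. A minimal sketch of the same guard over plain string tokens; the token model and names here are assumptions, not the rustc-internal API:

    use std::iter::Peekable;

    fn parse_count(
        tokens: &mut Peekable<impl Iterator<Item = String>>,
    ) -> Result<(String, Option<usize>), String> {
        let ident = tokens.next().ok_or_else(|| "expected an identifier".to_string())?;
        let depth = if tokens.next_if(|t| t.as_str() == ",").is_some() {
            // A comma was consumed, so a missing depth is now an explicit error
            // rather than behaving as if no depth had been written.
            if tokens.peek().is_none() {
                return Err("`count` followed by a comma must have an associated index \
                            indicating its depth"
                    .to_string());
            }
            Some(tokens.next().unwrap().parse::<usize>().map_err(|e| e.to_string())?)
        } else {
            None
        };
        Ok((ident, depth))
    }

    fn main() {
        let toks = |v: &[&str]| v.iter().map(|s| s.to_string()).collect::<Vec<_>>();
        println!("{:?}", parse_count(&mut toks(&["x", ",", "1"]).into_iter().peekable()));
        println!("{:?}", parse_count(&mut toks(&["x", ","]).into_iter().peekable()));
    }
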
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 40bfa3715..6546199f5 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -36,7 +36,7 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
pub(super) fn parse(
- input: tokenstream::TokenStream,
+ input: &tokenstream::TokenStream,
parsing_patterns: bool,
sess: &ParseSess,
node_id: NodeId,
@@ -48,7 +48,7 @@ pub(super) fn parse(
// For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
// additional trees if need be.
- let mut trees = input.into_trees();
+ let mut trees = input.trees();
while let Some(tree) = trees.next() {
// Given the parsed tree, if there is a metavar and we are expecting matchers, actually
// parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
@@ -56,7 +56,7 @@ pub(super) fn parse(
match tree {
TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
let span = match trees.next() {
- Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
+ Some(&tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
match trees.next() {
Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
Some((frag, _)) => {
@@ -96,10 +96,10 @@ pub(super) fn parse(
}
_ => token.span,
},
- tree => tree.as_ref().map_or(span, tokenstream::TokenTree::span),
+ tree => tree.map_or(span, tokenstream::TokenTree::span),
}
}
- tree => tree.as_ref().map_or(start_sp, tokenstream::TokenTree::span),
+ tree => tree.map_or(start_sp, tokenstream::TokenTree::span),
};
result.push(TokenTree::MetaVarDecl(span, ident, None));
@@ -134,9 +134,9 @@ fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess,
/// - `parsing_patterns`: same as [parse].
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `features`: language features so we can do feature gating.
-fn parse_tree(
- tree: tokenstream::TokenTree,
- outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
+fn parse_tree<'a>(
+ tree: &'a tokenstream::TokenTree,
+ outer_trees: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
parsing_patterns: bool,
sess: &ParseSess,
node_id: NodeId,
@@ -146,13 +146,13 @@ fn parse_tree(
// Depending on what `tree` is, we could be parsing different parts of a macro
match tree {
// `tree` is a `$` token. Look at the next token in `trees`
- tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
+ &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
// FIXME: Handle `Invisible`-delimited groups in a more systematic way
// during parsing.
let mut next = outer_trees.next();
- let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
+ let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
- trees = Box::new(tts.into_trees());
+ trees = Box::new(tts.trees());
next = trees.next();
} else {
trees = Box::new(outer_trees);
@@ -160,7 +160,7 @@ fn parse_tree(
match next {
// `tree` is followed by a delimited set of token trees.
- Some(tokenstream::TokenTree::Delimited(delim_span, delim, tts)) => {
+ Some(&tokenstream::TokenTree::Delimited(delim_span, delim, ref tts)) => {
if parsing_patterns {
if delim != Delimiter::Parenthesis {
span_dollar_dollar_or_metavar_in_the_lhs_err(
@@ -194,7 +194,7 @@ fn parse_tree(
Delimiter::Parenthesis => {}
_ => {
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
- let msg = format!("expected `(` or `{{`, found `{}`", tok);
+ let msg = format!("expected `(` or `{{`, found `{tok}`");
sess.span_diagnostic.span_err(delim_span.entire(), msg);
}
}
@@ -228,7 +228,7 @@ fn parse_tree(
}
// `tree` is followed by another `$`. This is an escaped `$`.
- Some(tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
+ Some(&tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
if parsing_patterns {
span_dollar_dollar_or_metavar_in_the_lhs_err(
sess,
@@ -256,11 +256,11 @@ fn parse_tree(
}
// `tree` is an arbitrary token. Keep it.
- tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token),
+ tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
// descend into the delimited set and further parse it.
- tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
+ &tokenstream::TokenTree::Delimited(span, delim, ref tts) => TokenTree::Delimited(
span,
Delimited {
delim,
@@ -286,16 +286,16 @@ fn kleene_op(token: &Token) -> Option<KleeneOp> {
/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
/// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
/// - Err(span) if the next token tree is not a token
-fn parse_kleene_op(
- input: &mut impl Iterator<Item = tokenstream::TokenTree>,
+fn parse_kleene_op<'a>(
+ input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
span: Span,
) -> Result<Result<(KleeneOp, Span), Token>, Span> {
match input.next() {
Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(&token) {
Some(op) => Ok(Ok((op, token.span))),
- None => Ok(Err(token)),
+ None => Ok(Err(token.clone())),
},
- tree => Err(tree.as_ref().map_or(span, tokenstream::TokenTree::span)),
+ tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
}
}
@@ -311,8 +311,8 @@ fn parse_kleene_op(
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-fn parse_sep_and_kleene_op(
- input: &mut impl Iterator<Item = tokenstream::TokenTree>,
+fn parse_sep_and_kleene_op<'a>(
+ input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
span: Span,
sess: &ParseSess,
) -> (Option<Token>, KleeneToken) {
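
The quoted.rs changes above switch `parse` and its helpers from consuming the token stream to borrowing it, so the helper iterators now yield references and carry an explicit lifetime. A generic sketch of the same ownership change, using an illustrative `Tok` type rather than rustc's token trees:

    struct Tok(String);

    // Previously the stream was consumed (`Vec<Tok>` plus `into_iter()`); now it is
    // borrowed and iterated by reference, and the caller keeps ownership.
    fn parse(input: &[Tok]) -> Vec<String> {
        let mut result = Vec::new();
        let mut trees = input.iter();
        // `parse_tree` may pull further items, so a plain `for` loop does not fit here.
        while let Some(tree) = trees.next() {
            result.push(parse_tree(tree, &mut trees));
        }
        result
    }

    // The helper receives `&'a Tok` items, which needs a lifetime tying the current
    // tree to the iterator it may keep reading from.
    fn parse_tree<'a>(tree: &'a Tok, rest: &mut impl Iterator<Item = &'a Tok>) -> String {
        match rest.next() {
            Some(next) => format!("{} {}", tree.0, next.0),
            None => tree.0.clone(),
        }
    }

    fn main() {
        let input = vec![Tok("$".into()), Tok("x".into()), Tok("+".into())];
        println!("{:?}", parse(&input));
    }
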
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index d523d3eac..15e7ab3fe 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -182,9 +182,7 @@ pub(super) fn transcribe<'a>(
LockstepIterSize::Constraint(len, _) => {
// We do this to avoid an extra clone above. We know that this is a
// sequence already.
- let mbe::TokenTree::Sequence(sp, seq) = seq else {
- unreachable!()
- };
+ let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
// Is the repetition empty?
if len == 0 {
@@ -222,16 +220,15 @@ pub(super) fn transcribe<'a>(
MatchedTokenTree(tt) => {
// `tt`s are emitted into the output stream directly as "raw tokens",
// without wrapping them into groups.
- let token = tt.clone();
- result.push(token);
+ result.push(tt.clone());
}
MatchedNonterminal(nt) => {
// Other variables are emitted into the output stream as groups with
// `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser.
marker.visit_span(&mut sp);
- let token = TokenTree::token_alone(token::Interpolated(nt.clone()), sp);
- result.push(token);
+ result
+ .push(TokenTree::token_alone(token::Interpolated(nt.clone()), sp));
}
MatchedSeq(..) => {
// We were unable to descend far enough. This is an error.
@@ -399,7 +396,9 @@ fn lockstep_iter_size(
}
TokenTree::MetaVarExpr(_, expr) => {
let default_rslt = LockstepIterSize::Unconstrained;
- let Some(ident) = expr.ident() else { return default_rslt; };
+ let Some(ident) = expr.ident() else {
+ return default_rslt;
+ };
let name = MacroRulesNormalizedIdent::new(ident);
match lookup_cur_matched(name, interpolations, repeats) {
Some(MatchedSeq(ads)) => {
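
The transcribe.rs hunks are rustfmt reflow of `let ... else` bindings and push calls; the `else` block must still diverge. A toy example of the construct (stable since Rust 1.65), unrelated to the rustc internals above:

    fn first_char(s: &str) -> char {
        // The `else` arm of `let ... else` must diverge (return, break,
        // continue, or panic); short bindings stay on a single line.
        let Some(c) = s.chars().next() else { return '?' };
        c
    }

    fn main() {
        println!("{} {}", first_char("macro"), first_char(""));
    }
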