Diffstat (limited to 'compiler/rustc_expand/src')
-rw-r--r-- | compiler/rustc_expand/src/base.rs | 36
-rw-r--r-- | compiler/rustc_expand/src/build.rs | 50
-rw-r--r-- | compiler/rustc_expand/src/config.rs | 2
-rw-r--r-- | compiler/rustc_expand/src/expand.rs | 42
-rw-r--r-- | compiler/rustc_expand/src/mbe.rs | 3
-rw-r--r-- | compiler/rustc_expand/src/mbe/diagnostics.rs | 257
-rw-r--r-- | compiler/rustc_expand/src/mbe/macro_parser.rs | 99
-rw-r--r-- | compiler/rustc_expand/src/mbe/macro_rules.rs | 407
-rw-r--r-- | compiler/rustc_expand/src/parse/tests.rs | 2
-rw-r--r-- | compiler/rustc_expand/src/placeholders.rs | 14
-rw-r--r-- | compiler/rustc_expand/src/proc_macro.rs | 1
-rw-r--r-- | compiler/rustc_expand/src/proc_macro_server.rs | 21
-rw-r--r-- | compiler/rustc_expand/src/tests.rs | 109
13 files changed, 668 insertions, 375 deletions
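
The bulk of this change (mbe/diagnostics.rs, macro_parser.rs, macro_rules.rs) turns `macro_rules!` matching into a two-pass scheme: `try_match_macro` first runs with a zero-overhead `NoopTracker`, and only when no arm matches does `failed_to_match_macro` rerun the match with a `CollectTrackerAndEmitter` that records the furthest failure and the remaining matcher for a better error. The sketch below illustrates that tracker pattern in miniature; the trait hooks mirror the real `Tracker`, but the matcher, driver, and types are toy stand-ins, not the actual rustc_expand API.

// Simplified, self-contained sketch of the tracker pattern; all names below
// are illustrative stand-ins for the real Tracker / NoopTracker /
// CollectTrackerAndEmitter machinery, not the compiler's own types.

/// Hooks invoked during matching. The default bodies do nothing, so a no-op
/// tracker adds zero overhead after monomorphization.
trait Tracker {
    fn before_match_loc(&mut self, _loc: &str) {}
    fn after_arm(&mut self, _result: &Result<usize, String>) {}
    fn description() -> &'static str;
}

/// Used on the first (hot) pass: track nothing.
struct NoopTracker;
impl Tracker for NoopTracker {
    fn description() -> &'static str {
        "none"
    }
}

/// Used only on the retry pass: remember a useful failure for diagnostics.
#[derive(Default)]
struct CollectTracker {
    remaining: Option<String>,
    best_failure: Option<String>,
}
impl Tracker for CollectTracker {
    fn before_match_loc(&mut self, loc: &str) {
        self.remaining = Some(loc.to_string());
    }
    fn after_arm(&mut self, result: &Result<usize, String>) {
        if let Err(msg) = result {
            // Simplified: keep the last failure instead of the furthest one.
            let loc = self.remaining.as_deref().unwrap_or("?");
            self.best_failure = Some(format!("{msg}, while trying to match `{loc}`"));
        }
    }
    fn description() -> &'static str {
        "detailed"
    }
}

/// Toy matcher: an arm "matches" if it equals the input verbatim.
fn try_match<T: Tracker>(arms: &[&str], input: &str, track: &mut T) -> Result<usize, ()> {
    let _ = T::description(); // the real code uses this to label tracing spans
    for (i, arm) in arms.iter().copied().enumerate() {
        track.before_match_loc(arm);
        let result =
            if arm == input { Ok(i) } else { Err(format!("no rule expected `{input}`")) };
        track.after_arm(&result);
        if let Ok(i) = result {
            return Ok(i);
        }
    }
    Err(())
}

/// Fast pass first; only rerun with detailed tracking if it failed.
fn expand(arms: &[&str], input: &str) -> Result<usize, String> {
    if let Ok(i) = try_match(arms, input, &mut NoopTracker) {
        return Ok(i);
    }
    let mut tracker = CollectTracker::default();
    let _ = try_match(arms, input, &mut tracker);
    Err(tracker.best_failure.unwrap_or_else(|| "ran no matchers".to_string()))
}

fn main() {
    assert_eq!(expand(&["foo", "bar"], "bar"), Ok(1));
    println!("{}", expand(&["foo", "bar"], "baz").unwrap_err());
}

Because the tracker is a generic parameter, the no-op pass monomorphizes down to essentially the pre-existing matching loop, so the extra bookkeeping is only paid on the error path.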
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index c8de60ccb..9d6a4f9a1 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -16,6 +16,7 @@ use rustc_errors::{ use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT; use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics}; use rustc_parse::{self, parser, MACRO_ARGUMENTS}; +use rustc_session::errors::report_lit_error; use rustc_session::{parse::ParseSess, Limit, Session}; use rustc_span::def_id::{CrateNum, DefId, LocalDefId}; use rustc_span::edition::Edition; @@ -242,14 +243,15 @@ pub enum ExpandResult<T, U> { Retry(U), } -// `meta_item` is the attribute, and `item` is the item being modified. pub trait MultiItemModifier { + /// `meta_item` is the attribute, and `item` is the item being modified. fn expand( &self, ecx: &mut ExtCtxt<'_>, span: Span, meta_item: &ast::MetaItem, item: Annotatable, + is_derive_const: bool, ) -> ExpandResult<Vec<Annotatable>, Annotatable>; } @@ -263,6 +265,7 @@ where span: Span, meta_item: &ast::MetaItem, item: Annotatable, + _is_derive_const: bool, ) -> ExpandResult<Vec<Annotatable>, Annotatable> { ExpandResult::Ready(self(ecx, span, meta_item, item)) } @@ -505,7 +508,7 @@ impl MacResult for MacEager { return Some(p); } if let Some(e) = self.expr { - if let ast::ExprKind::Lit(_) = e.kind { + if matches!(e.kind, ast::ExprKind::Lit(_) | ast::ExprKind::IncludedBytes(_)) { return Some(P(ast::Pat { id: ast::DUMMY_NODE_ID, span: e.span, @@ -674,8 +677,13 @@ pub enum SyntaxExtensionKind { /// A token-based derive macro. Derive( - /// An expander with signature TokenStream -> TokenStream (not yet). + /// An expander with signature TokenStream -> TokenStream. /// The produced TokenSteam is appended to the input TokenSteam. + /// + /// FIXME: The text above describes how this should work. Currently it + /// is handled identically to `LegacyDerive`. It should be migrated to + /// a token-based representation like `Bang` and `Attr`, instead of + /// using `MultiItemModifier`. Box<dyn MultiItemModifier + sync::Sync + sync::Send>, ), @@ -873,7 +881,7 @@ impl SyntaxExtension { /// Error type that denotes indeterminacy. 
pub struct Indeterminate; -pub type DeriveResolutions = Vec<(ast::Path, Annotatable, Option<Lrc<SyntaxExtension>>)>; +pub type DeriveResolutions = Vec<(ast::Path, Annotatable, Option<Lrc<SyntaxExtension>>, bool)>; pub trait ResolverExpand { fn next_node_id(&mut self) -> NodeId; @@ -952,7 +960,7 @@ pub trait LintStoreExpand { node_id: NodeId, attrs: &[Attribute], items: &[P<Item>], - name: &str, + name: Symbol, ); } @@ -1224,10 +1232,10 @@ pub fn expr_to_spanned_string<'a>( let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr(); Err(match expr.kind { - ast::ExprKind::Lit(ref l) => match l.kind { - ast::LitKind::Str(s, style) => return Ok((s, style, expr.span)), - ast::LitKind::ByteStr(_) => { - let mut err = cx.struct_span_err(l.span, err_msg); + ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) { + Ok(ast::LitKind::Str(s, style)) => return Ok((s, style, expr.span)), + Ok(ast::LitKind::ByteStr(_)) => { + let mut err = cx.struct_span_err(expr.span, err_msg); let span = expr.span.shrink_to_lo(); err.span_suggestion( span.with_hi(span.lo() + BytePos(1)), @@ -1237,8 +1245,12 @@ pub fn expr_to_spanned_string<'a>( ); Some((err, true)) } - ast::LitKind::Err => None, - _ => Some((cx.struct_span_err(l.span, err_msg), false)), + Ok(ast::LitKind::Err) => None, + Err(err) => { + report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span); + None + } + _ => Some((cx.struct_span_err(expr.span, err_msg), false)), }, ast::ExprKind::Err => None, _ => Some((cx.struct_span_err(expr.span, err_msg), false)), @@ -1433,7 +1445,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool { let crate_matches = if c.starts_with("allsorts-rental") { true } else { - let mut version = c.trim_start_matches("rental-").split("."); + let mut version = c.trim_start_matches("rental-").split('.'); version.next() == Some("0") && version.next() == Some("5") && version diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index 0952e65cf..4812bdd9d 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -1,13 +1,12 @@ use crate::base::ExtCtxt; - use rustc_ast::attr; use rustc_ast::ptr::P; use rustc_ast::{self as ast, AttrVec, BlockCheckMode, Expr, LocalKind, PatKind, UnOp}; use rustc_data_structures::sync::Lrc; use rustc_span::source_map::Spanned; use rustc_span::symbol::{kw, sym, Ident, Symbol}; - use rustc_span::Span; +use thin_vec::ThinVec; impl<'a> ExtCtxt<'a> { pub fn path(&self, span: Span, strs: Vec<Ident>) -> ast::Path { @@ -28,7 +27,7 @@ impl<'a> ExtCtxt<'a> { ) -> ast::Path { assert!(!idents.is_empty()); let add_root = global && !idents[0].is_path_segment_keyword(); - let mut segments = Vec::with_capacity(idents.len() + add_root as usize); + let mut segments = ThinVec::with_capacity(idents.len() + add_root as usize); if add_root { segments.push(ast::PathSegment::path_root(span)); } @@ -194,7 +193,7 @@ impl<'a> ExtCtxt<'a> { self.stmt_local(local, sp) } - // Generates `let _: Type;`, which is usually used for type assertions. + /// Generates `let _: Type;`, which is usually used for type assertions. 
pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt { let local = P(ast::Local { pat: self.pat_wild(span), @@ -334,8 +333,8 @@ impl<'a> ExtCtxt<'a> { } fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> { - let lit = ast::Lit::from_lit_kind(lit_kind, span); - self.expr(span, ast::ExprKind::Lit(lit)) + let token_lit = lit_kind.to_token_lit(); + self.expr(span, ast::ExprKind::Lit(token_lit)) } pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> { @@ -532,15 +531,18 @@ impl<'a> ExtCtxt<'a> { // here, but that's not entirely clear. self.expr( span, - ast::ExprKind::Closure( - ast::ClosureBinder::NotPresent, - ast::CaptureBy::Ref, - ast::Async::No, - ast::Movability::Movable, + ast::ExprKind::Closure(Box::new(ast::Closure { + binder: ast::ClosureBinder::NotPresent, + capture_clause: ast::CaptureBy::Ref, + asyncness: ast::Async::No, + movability: ast::Movability::Movable, fn_decl, body, - span, - ), + fn_decl_span: span, + // FIXME(SarthakSingh31): This points to the start of the declaration block and + // not the span of the argument block. + fn_arg_span: span, + })), ) } @@ -580,8 +582,6 @@ impl<'a> ExtCtxt<'a> { attrs: ast::AttrVec, kind: ast::ItemKind, ) -> P<ast::Item> { - // FIXME: Would be nice if our generated code didn't violate - // Rust coding conventions P(ast::Item { ident: name, attrs, @@ -619,11 +619,23 @@ impl<'a> ExtCtxt<'a> { self.item(span, name, AttrVec::new(), ast::ItemKind::Const(def, ty, Some(expr))) } - pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute { - attr::mk_attr_outer(&self.sess.parse_sess.attr_id_generator, mi) + // Builds `#[name]`. + pub fn attr_word(&self, name: Symbol, span: Span) -> ast::Attribute { + let g = &self.sess.parse_sess.attr_id_generator; + attr::mk_attr_word(g, ast::AttrStyle::Outer, name, span) + } + + // Builds `#[name = val]`. + // + // Note: `span` is used for both the identifer and the value. + pub fn attr_name_value_str(&self, name: Symbol, val: Symbol, span: Span) -> ast::Attribute { + let g = &self.sess.parse_sess.attr_id_generator; + attr::mk_attr_name_value_str(g, ast::AttrStyle::Outer, name, val, span) } - pub fn meta_word(&self, sp: Span, w: Symbol) -> ast::MetaItem { - attr::mk_word_item(Ident::new(w, sp)) + // Builds `#[outer(inner)]`. + pub fn attr_nested_word(&self, outer: Symbol, inner: Symbol, span: Span) -> ast::Attribute { + let g = &self.sess.parse_sess.attr_id_generator; + attr::mk_attr_nested_word(g, ast::AttrStyle::Outer, outer, inner, span) } } diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index 1d2b1298a..2510795c2 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -200,7 +200,7 @@ fn get_features( features } -// `cfg_attr`-process the crate's attributes and compute the crate's features. +/// `cfg_attr`-process the crate's attributes and compute the crate's features. 
pub fn features( sess: &Session, mut krate: ast::Crate, diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 57713fb3c..1014ec220 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1,7 +1,7 @@ use crate::base::*; use crate::config::StripUnconfigured; use crate::hygiene::SyntaxContext; -use crate::mbe::macro_rules::annotate_err_with_kind; +use crate::mbe::diagnostics::annotate_err_with_kind; use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod}; use crate::placeholders::{placeholder, PlaceholderExpander}; @@ -11,9 +11,9 @@ use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::visit::{self, AssocCtxt, Visitor}; -use rustc_ast::{AssocItemKind, AstNodeWrapper, AttrStyle, AttrVec, ExprKind, ForeignItemKind}; -use rustc_ast::{HasAttrs, HasNodeId}; -use rustc_ast::{Inline, ItemKind, MacArgs, MacStmtStyle, MetaItemKind, ModKind}; +use rustc_ast::{AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind}; +use rustc_ast::{ForeignItemKind, HasAttrs, HasNodeId}; +use rustc_ast::{Inline, ItemKind, MacStmtStyle, MetaItemKind, ModKind}; use rustc_ast::{NestedMetaItem, NodeId, PatKind, StmtKind, TyKind}; use rustc_ast_pretty::pprust; use rustc_data_structures::map_in_place::MapInPlace; @@ -337,6 +337,7 @@ pub enum InvocationKind { }, Derive { path: ast::Path, + is_const: bool, item: Annotatable, }, } @@ -400,7 +401,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { krate } - // Recursively expand all macro invocations in this AST fragment. + /// Recursively expand all macro invocations in this AST fragment. pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment { let orig_expansion_data = self.cx.current_expansion.clone(); let orig_force_mode = self.cx.force_mode; @@ -478,13 +479,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> { derive_invocations.reserve(derives.len()); derives .into_iter() - .map(|(path, item, _exts)| { + .map(|(path, item, _exts, is_const)| { // FIXME: Consider using the derive resolutions (`_exts`) // instead of enqueuing the derives to be resolved again later. let expn_id = LocalExpnId::fresh_empty(); derive_invocations.push(( Invocation { - kind: InvocationKind::Derive { path, item }, + kind: InvocationKind::Derive { path, item, is_const }, fragment_kind, expansion_data: ExpansionData { id: expn_id, @@ -653,7 +654,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ExpandResult::Ready(match invoc.kind { InvocationKind::Bang { mac, .. 
} => match ext { SyntaxExtensionKind::Bang(expander) => { - let Ok(tok_result) = expander.expand(self.cx, span, mac.args.inner_tokens()) else { + let Ok(tok_result) = expander.expand(self.cx, span, mac.args.tokens.clone()) else { return ExpandResult::Ready(fragment_kind.dummy(span)); }; self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span) @@ -661,7 +662,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtensionKind::LegacyBang(expander) => { let prev = self.cx.current_expansion.prior_type_ascription; self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription; - let tok_result = expander.expand(self.cx, span, mac.args.inner_tokens()); + let tok_result = expander.expand(self.cx, span, mac.args.tokens.clone()); let result = if let Some(result) = fragment_kind.make_from(tok_result) { result } else { @@ -705,7 +706,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { _ => item.to_tokens(), }; let attr_item = attr.unwrap_normal_item(); - if let MacArgs::Eq(..) = attr_item.args { + if let AttrArgs::Eq(..) = attr_item.args { self.cx.span_err(span, "key-value macro attributes are not supported"); } let inner_tokens = attr_item.args.inner_tokens(); @@ -717,7 +718,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtensionKind::LegacyAttr(expander) => { match validate_attr::parse_meta(&self.cx.sess.parse_sess, &attr) { Ok(meta) => { - let items = match expander.expand(self.cx, span, &meta, item) { + let items = match expander.expand(self.cx, span, &meta, item, false) { ExpandResult::Ready(items) => items, ExpandResult::Retry(item) => { // Reassemble the original invocation for retrying. @@ -749,19 +750,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } _ => unreachable!(), }, - InvocationKind::Derive { path, item } => match ext { + InvocationKind::Derive { path, item, is_const } => match ext { SyntaxExtensionKind::Derive(expander) | SyntaxExtensionKind::LegacyDerive(expander) => { if let SyntaxExtensionKind::Derive(..) = ext { self.gate_proc_macro_input(&item); } let meta = ast::MetaItem { kind: MetaItemKind::Word, span, path }; - let items = match expander.expand(self.cx, span, &meta, item) { + let items = match expander.expand(self.cx, span, &meta, item, is_const) { ExpandResult::Ready(items) => items, ExpandResult::Retry(item) => { // Reassemble the original invocation for retrying. 
return ExpandResult::Retry(Invocation { - kind: InvocationKind::Derive { path: meta.path, item }, + kind: InvocationKind::Derive { path: meta.path, item, is_const }, ..invoc }); } @@ -1121,7 +1122,7 @@ impl InvocationCollectorNode for P<ast::Item> { ecx.current_expansion.lint_node_id, &attrs, &items, - ident.name.as_str(), + ident.name, ); } @@ -1643,7 +1644,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { let mut span: Option<Span> = None; while let Some(attr) = attrs.next() { rustc_ast_passes::feature_gate::check_attribute(attr, self.cx.sess, features); - validate_attr::check_meta(&self.cx.sess.parse_sess, attr); + validate_attr::check_attr(&self.cx.sess.parse_sess, attr); let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span }; span = Some(current_span); @@ -1930,9 +1931,12 @@ pub struct ExpansionConfig<'feat> { pub features: Option<&'feat Features>, pub recursion_limit: Limit, pub trace_mac: bool, - pub should_test: bool, // If false, strip `#[test]` nodes - pub span_debug: bool, // If true, use verbose debugging for `proc_macro::Span` - pub proc_macro_backtrace: bool, // If true, show backtraces for proc-macro panics + /// If false, strip `#[test]` nodes + pub should_test: bool, + /// If true, use verbose debugging for `proc_macro::Span` + pub span_debug: bool, + /// If true, show backtraces for proc-macro panics + pub proc_macro_backtrace: bool, } impl<'feat> ExpansionConfig<'feat> { diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs index f42576b16..a43b2a001 100644 --- a/compiler/rustc_expand/src/mbe.rs +++ b/compiler/rustc_expand/src/mbe.rs @@ -3,6 +3,7 @@ //! why we call this module `mbe`. For external documentation, prefer the //! official terminology: "declarative macros". +pub(crate) mod diagnostics; pub(crate) mod macro_check; pub(crate) mod macro_parser; pub(crate) mod macro_rules; @@ -52,7 +53,7 @@ impl KleeneToken { /// A Kleene-style [repetition operator](https://en.wikipedia.org/wiki/Kleene_star) /// for token sequences. #[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy)] -enum KleeneOp { +pub(crate) enum KleeneOp { /// Kleene star (`*`) for zero or more repetitions ZeroOrMore, /// Kleene plus (`+`) for one or more repetitions diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs new file mode 100644 index 000000000..197f05691 --- /dev/null +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -0,0 +1,257 @@ +use std::borrow::Cow; + +use crate::base::{DummyResult, ExtCtxt, MacResult}; +use crate::expand::{parse_ast_fragment, AstFragmentKind}; +use crate::mbe::{ + macro_parser::{MatcherLoc, NamedParseResult, ParseResult::*, TtParser}, + macro_rules::{try_match_macro, Tracker}, +}; +use rustc_ast::token::{self, Token}; +use rustc_ast::tokenstream::TokenStream; +use rustc_ast_pretty::pprust; +use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage}; +use rustc_parse::parser::{Parser, Recovery}; +use rustc_span::source_map::SourceMap; +use rustc_span::symbol::Ident; +use rustc_span::Span; + +use super::macro_rules::{parser_from_cx, NoopTracker}; + +pub(super) fn failed_to_match_macro<'cx>( + cx: &'cx mut ExtCtxt<'_>, + sp: Span, + def_span: Span, + name: Ident, + arg: TokenStream, + lhses: &[Vec<MatcherLoc>], +) -> Box<dyn MacResult + 'cx> { + let sess = &cx.sess.parse_sess; + + // An error occurred, try the expansion again, tracking the expansion closely for better diagnostics. 
+ let mut tracker = CollectTrackerAndEmitter::new(cx, sp); + + let try_success_result = try_match_macro(sess, name, &arg, lhses, &mut tracker); + + if try_success_result.is_ok() { + // Nonterminal parser recovery might turn failed matches into successful ones, + // but for that it must have emitted an error already + tracker.cx.sess.delay_span_bug(sp, "Macro matching returned a success on the second try"); + } + + if let Some(result) = tracker.result { + // An irrecoverable error occurred and has been emitted. + return result; + } + + let Some((token, label, remaining_matcher)) = tracker.best_failure else { + return DummyResult::any(sp); + }; + + let span = token.span.substitute_dummy(sp); + + let mut err = cx.struct_span_err(span, &parse_failure_msg(&token)); + err.span_label(span, label); + if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) { + err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro"); + } + + annotate_doc_comment(&mut err, sess.source_map(), span); + + if let Some(span) = remaining_matcher.span() { + err.span_note(span, format!("while trying to match {remaining_matcher}")); + } else { + err.note(format!("while trying to match {remaining_matcher}")); + } + + // Check whether there's a missing comma in this macro call, like `println!("{}" a);` + if let Some((arg, comma_span)) = arg.add_comma() { + for lhs in lhses { + let parser = parser_from_cx(sess, arg.clone(), Recovery::Allowed); + let mut tt_parser = TtParser::new(name); + + if let Success(_) = + tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, &mut NoopTracker) + { + if comma_span.is_dummy() { + err.note("you might be missing a comma"); + } else { + err.span_suggestion_short( + comma_span, + "missing comma here", + ", ", + Applicability::MachineApplicable, + ); + } + } + } + } + err.emit(); + cx.trace_macros_diag(); + DummyResult::any(sp) +} + +/// The tracker used for the slow error path that collects useful info for diagnostics. +struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> { + cx: &'a mut ExtCtxt<'cx>, + remaining_matcher: Option<&'matcher MatcherLoc>, + /// Which arm's failure should we report? 
(the one furthest along) + best_failure: Option<(Token, &'static str, MatcherLoc)>, + root_span: Span, + result: Option<Box<dyn MacResult + 'cx>>, +} + +impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> { + fn before_match_loc(&mut self, parser: &TtParser, matcher: &'matcher MatcherLoc) { + if self.remaining_matcher.is_none() + || (parser.has_no_remaining_items_for_step() && *matcher != MatcherLoc::Eof) + { + self.remaining_matcher = Some(matcher); + } + } + + fn after_arm(&mut self, result: &NamedParseResult) { + match result { + Success(_) => { + // Nonterminal parser recovery might turn failed matches into successful ones, + // but for that it must have emitted an error already + self.cx.sess.delay_span_bug( + self.root_span, + "should not collect detailed info for successful macro match", + ); + } + Failure(token, msg) => match self.best_failure { + Some((ref best_token, _, _)) if best_token.span.lo() >= token.span.lo() => {} + _ => { + self.best_failure = Some(( + token.clone(), + msg, + self.remaining_matcher + .expect("must have collected matcher already") + .clone(), + )) + } + }, + Error(err_sp, msg) => { + let span = err_sp.substitute_dummy(self.root_span); + self.cx.struct_span_err(span, msg).emit(); + self.result = Some(DummyResult::any(span)); + } + ErrorReported(_) => self.result = Some(DummyResult::any(self.root_span)), + } + } + + fn description() -> &'static str { + "detailed" + } + + fn recovery() -> Recovery { + Recovery::Allowed + } +} + +impl<'a, 'cx> CollectTrackerAndEmitter<'a, 'cx, '_> { + fn new(cx: &'a mut ExtCtxt<'cx>, root_span: Span) -> Self { + Self { cx, remaining_matcher: None, best_failure: None, root_span, result: None } + } +} + +pub(super) fn emit_frag_parse_err( + mut e: DiagnosticBuilder<'_, rustc_errors::ErrorGuaranteed>, + parser: &Parser<'_>, + orig_parser: &mut Parser<'_>, + site_span: Span, + arm_span: Span, + kind: AstFragmentKind, +) { + // FIXME(davidtwco): avoid depending on the error message text + if parser.token == token::Eof + && let DiagnosticMessage::Str(message) = &e.message[0].0 + && message.ends_with(", found `<eof>`") + { + let msg = &e.message[0]; + e.message[0] = ( + DiagnosticMessage::Str(format!( + "macro expansion ends with an incomplete expression: {}", + message.replace(", found `<eof>`", ""), + )), + msg.1, + ); + if !e.span.is_dummy() { + // early end of macro arm (#52866) + e.replace_span_with(parser.token.span.shrink_to_hi()); + } + } + if e.span.is_dummy() { + // Get around lack of span in error (#30128) + e.replace_span_with(site_span); + if !parser.sess.source_map().is_imported(arm_span) { + e.span_label(arm_span, "in this macro arm"); + } + } else if parser.sess.source_map().is_imported(parser.token.span) { + e.span_label(site_span, "in this macro invocation"); + } + match kind { + // Try a statement if an expression is wanted but failed and suggest adding `;` to call. 
+ AstFragmentKind::Expr => match parse_ast_fragment(orig_parser, AstFragmentKind::Stmts) { + Err(err) => err.cancel(), + Ok(_) => { + e.note( + "the macro call doesn't expand to an expression, but it can expand to a statement", + ); + e.span_suggestion_verbose( + site_span.shrink_to_hi(), + "add `;` to interpret the expansion as a statement", + ";", + Applicability::MaybeIncorrect, + ); + } + }, + _ => annotate_err_with_kind(&mut e, kind, site_span), + }; + e.emit(); +} + +pub(crate) fn annotate_err_with_kind(err: &mut Diagnostic, kind: AstFragmentKind, span: Span) { + match kind { + AstFragmentKind::Ty => { + err.span_label(span, "this macro call doesn't expand to a type"); + } + AstFragmentKind::Pat => { + err.span_label(span, "this macro call doesn't expand to a pattern"); + } + _ => {} + }; +} + +#[derive(Subdiagnostic)] +enum ExplainDocComment { + #[label(expand_explain_doc_comment_inner)] + Inner { + #[primary_span] + span: Span, + }, + #[label(expand_explain_doc_comment_outer)] + Outer { + #[primary_span] + span: Span, + }, +} + +pub(super) fn annotate_doc_comment(err: &mut Diagnostic, sm: &SourceMap, span: Span) { + if let Ok(src) = sm.span_to_snippet(span) { + if src.starts_with("///") || src.starts_with("/**") { + err.subdiagnostic(ExplainDocComment::Outer { span }); + } else if src.starts_with("//!") || src.starts_with("/*!") { + err.subdiagnostic(ExplainDocComment::Inner { span }); + } + } +} + +/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For +/// other tokens, this is "unexpected token...". +pub(super) fn parse_failure_msg(tok: &Token) -> String { + match tok.kind { + token::Eof => "unexpected end of macro invocation".to_string(), + _ => format!("no rules expected the token `{}`", pprust::token_to_string(tok),), + } +} diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs index c8bdc3931..d161868ed 100644 --- a/compiler/rustc_expand/src/mbe/macro_parser.rs +++ b/compiler/rustc_expand/src/mbe/macro_parser.rs @@ -73,19 +73,21 @@ pub(crate) use NamedMatch::*; pub(crate) use ParseResult::*; -use crate::mbe::{KleeneOp, TokenTree}; +use crate::mbe::{macro_rules::Tracker, KleeneOp, TokenTree}; use rustc_ast::token::{self, DocComment, Nonterminal, NonterminalKind, Token}; +use rustc_ast_pretty::pprust; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sync::Lrc; +use rustc_errors::ErrorGuaranteed; use rustc_lint_defs::pluralize; use rustc_parse::parser::{NtOrTt, Parser}; +use rustc_span::symbol::Ident; use rustc_span::symbol::MacroRulesNormalizedIdent; use rustc_span::Span; - -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::Lrc; -use rustc_span::symbol::Ident; use std::borrow::Cow; use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::fmt::Display; /// A unit within a matcher that a `MatcherPos` can refer to. Similar to (and derived from) /// `mbe::TokenTree`, but designed specifically for fast and easy traversal during matching. @@ -96,7 +98,8 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; /// /// This means a matcher can be represented by `&[MatcherLoc]`, and traversal mostly involves /// simply incrementing the current matcher position index by one. 
-pub(super) enum MatcherLoc { +#[derive(Debug, PartialEq, Clone)] +pub(crate) enum MatcherLoc { Token { token: Token, }, @@ -128,6 +131,46 @@ pub(super) enum MatcherLoc { Eof, } +impl MatcherLoc { + pub(super) fn span(&self) -> Option<Span> { + match self { + MatcherLoc::Token { token } => Some(token.span), + MatcherLoc::Delimited => None, + MatcherLoc::Sequence { .. } => None, + MatcherLoc::SequenceKleeneOpNoSep { .. } => None, + MatcherLoc::SequenceSep { .. } => None, + MatcherLoc::SequenceKleeneOpAfterSep { .. } => None, + MatcherLoc::MetaVarDecl { span, .. } => Some(*span), + MatcherLoc::Eof => None, + } + } +} + +impl Display for MatcherLoc { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + MatcherLoc::Token { token } | MatcherLoc::SequenceSep { separator: token } => { + write!(f, "`{}`", pprust::token_to_string(token)) + } + MatcherLoc::MetaVarDecl { bind, kind, .. } => { + write!(f, "meta-variable `${bind}")?; + if let Some(kind) = kind { + write!(f, ":{}", kind)?; + } + write!(f, "`")?; + Ok(()) + } + MatcherLoc::Eof => f.write_str("end of macro"), + + // These are not printed in the diagnostic + MatcherLoc::Delimited => f.write_str("delimiter"), + MatcherLoc::Sequence { .. } => f.write_str("sequence start"), + MatcherLoc::SequenceKleeneOpNoSep { .. } => f.write_str("sequence end"), + MatcherLoc::SequenceKleeneOpAfterSep { .. } => f.write_str("sequence end"), + } + } +} + pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> { fn inner( tts: &[TokenTree], @@ -270,13 +313,17 @@ pub(crate) enum ParseResult<T> { Failure(Token, &'static str), /// Fatal error (malformed macro?). Abort compilation. Error(rustc_span::Span, String), - ErrorReported, + ErrorReported(ErrorGuaranteed), } /// A `ParseResult` where the `Success` variant contains a mapping of /// `MacroRulesNormalizedIdent`s to `NamedMatch`es. This represents the mapping /// of metavars to the token trees they bind to. -pub(crate) type NamedParseResult = ParseResult<FxHashMap<MacroRulesNormalizedIdent, NamedMatch>>; +pub(crate) type NamedParseResult = ParseResult<NamedMatches>; + +/// Contains a mapping of `MacroRulesNormalizedIdent`s to `NamedMatch`es. +/// This represents the mapping of metavars to the token trees they bind to. +pub(crate) type NamedMatches = FxHashMap<MacroRulesNormalizedIdent, NamedMatch>; /// Count how many metavars declarations are in `matcher`. pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize { @@ -393,6 +440,10 @@ impl TtParser { } } + pub(super) fn has_no_remaining_items_for_step(&self) -> bool { + self.cur_mps.is_empty() + } + /// Process the matcher positions of `cur_mps` until it is empty. In the process, this will /// produce more mps in `next_mps` and `bb_mps`. /// @@ -400,17 +451,21 @@ impl TtParser { /// /// `Some(result)` if everything is finished, `None` otherwise. Note that matches are kept /// track of through the mps generated. - fn parse_tt_inner( + fn parse_tt_inner<'matcher, T: Tracker<'matcher>>( &mut self, - matcher: &[MatcherLoc], + matcher: &'matcher [MatcherLoc], token: &Token, + track: &mut T, ) -> Option<NamedParseResult> { // Matcher positions that would be valid if the macro invocation was over now. Only // modified if `token == Eof`. 
let mut eof_mps = EofMatcherPositions::None; while let Some(mut mp) = self.cur_mps.pop() { - match &matcher[mp.idx] { + let matcher_loc = &matcher[mp.idx]; + track.before_match_loc(self, matcher_loc); + + match matcher_loc { MatcherLoc::Token { token: t } => { // If it's a doc comment, we just ignore it and move on to the next tt in the // matcher. This is a bug, but #95267 showed that existing programs rely on @@ -450,7 +505,7 @@ impl TtParser { // Try zero matches of this sequence, by skipping over it. self.cur_mps.push(MatcherPos { idx: idx_first_after, - matches: mp.matches.clone(), // a cheap clone + matches: Lrc::clone(&mp.matches), }); } @@ -463,8 +518,8 @@ impl TtParser { // sequence. If that's not possible, `ending_mp` will fail quietly when it is // processed next time around the loop. let ending_mp = MatcherPos { - idx: mp.idx + 1, // +1 skips the Kleene op - matches: mp.matches.clone(), // a cheap clone + idx: mp.idx + 1, // +1 skips the Kleene op + matches: Lrc::clone(&mp.matches), }; self.cur_mps.push(ending_mp); @@ -479,8 +534,8 @@ impl TtParser { // separator yet. Try ending the sequence. If that's not possible, `ending_mp` // will fail quietly when it is processed next time around the loop. let ending_mp = MatcherPos { - idx: mp.idx + 2, // +2 skips the separator and the Kleene op - matches: mp.matches.clone(), // a cheap clone + idx: mp.idx + 2, // +2 skips the separator and the Kleene op + matches: Lrc::clone(&mp.matches), }; self.cur_mps.push(ending_mp); @@ -552,10 +607,11 @@ impl TtParser { } /// Match the token stream from `parser` against `matcher`. - pub(super) fn parse_tt( + pub(super) fn parse_tt<'matcher, T: Tracker<'matcher>>( &mut self, parser: &mut Cow<'_, Parser<'_>>, - matcher: &[MatcherLoc], + matcher: &'matcher [MatcherLoc], + track: &mut T, ) -> NamedParseResult { // A queue of possible matcher positions. We initialize it with the matcher position in // which the "dot" is before the first token of the first token tree in `matcher`. @@ -571,7 +627,8 @@ impl TtParser { // Process `cur_mps` until either we have finished the input or we need to get some // parsing from the black-box parser done. - if let Some(res) = self.parse_tt_inner(matcher, &parser.token) { + let res = self.parse_tt_inner(matcher, &parser.token, track); + if let Some(res) = res { return res; } @@ -612,14 +669,14 @@ impl TtParser { // edition-specific matching behavior for non-terminals. 
let nt = match parser.to_mut().parse_nonterminal(kind) { Err(mut err) => { - err.span_label( + let guarantee = err.span_label( span, format!( "while parsing argument for this `{kind}` macro fragment" ), ) .emit(); - return ErrorReported; + return ErrorReported(guarantee); } Ok(nt) => nt, }; diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index f6fe38174..2dbb90e21 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -2,6 +2,7 @@ use crate::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander}; use crate::base::{SyntaxExtension, SyntaxExtensionKind}; use crate::expand::{ensure_complete_parse, parse_ast_fragment, AstFragment, AstFragmentKind}; use crate::mbe; +use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg}; use crate::mbe::macro_check; use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser}; use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc}; @@ -14,18 +15,17 @@ use rustc_ast::{NodeId, DUMMY_NODE_ID}; use rustc_ast_pretty::pprust; use rustc_attr::{self as attr, TransparencyError}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; -use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage}; +use rustc_errors::{Applicability, ErrorGuaranteed}; use rustc_feature::Features; use rustc_lint_defs::builtin::{ RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS, }; use rustc_lint_defs::BuiltinLintDiagnostics; -use rustc_parse::parser::Parser; +use rustc_parse::parser::{Parser, Recovery}; use rustc_session::parse::ParseSess; use rustc_session::Session; use rustc_span::edition::Edition; use rustc_span::hygiene::Transparency; -use rustc_span::source_map::SourceMap; use rustc_span::symbol::{kw, sym, Ident, MacroRulesNormalizedIdent}; use rustc_span::Span; @@ -33,6 +33,9 @@ use std::borrow::Cow; use std::collections::hash_map::Entry; use std::{mem, slice}; +use super::diagnostics; +use super::macro_parser::{NamedMatches, NamedParseResult}; + pub(crate) struct ParserAnyMacro<'a> { parser: Parser<'a>, @@ -47,74 +50,6 @@ pub(crate) struct ParserAnyMacro<'a> { is_local: bool, } -pub(crate) fn annotate_err_with_kind(err: &mut Diagnostic, kind: AstFragmentKind, span: Span) { - match kind { - AstFragmentKind::Ty => { - err.span_label(span, "this macro call doesn't expand to a type"); - } - AstFragmentKind::Pat => { - err.span_label(span, "this macro call doesn't expand to a pattern"); - } - _ => {} - }; -} - -fn emit_frag_parse_err( - mut e: DiagnosticBuilder<'_, rustc_errors::ErrorGuaranteed>, - parser: &Parser<'_>, - orig_parser: &mut Parser<'_>, - site_span: Span, - arm_span: Span, - kind: AstFragmentKind, -) { - // FIXME(davidtwco): avoid depending on the error message text - if parser.token == token::Eof - && let DiagnosticMessage::Str(message) = &e.message[0].0 - && message.ends_with(", found `<eof>`") - { - let msg = &e.message[0]; - e.message[0] = ( - DiagnosticMessage::Str(format!( - "macro expansion ends with an incomplete expression: {}", - message.replace(", found `<eof>`", ""), - )), - msg.1, - ); - if !e.span.is_dummy() { - // early end of macro arm (#52866) - e.replace_span_with(parser.token.span.shrink_to_hi()); - } - } - if e.span.is_dummy() { - // Get around lack of span in error (#30128) - e.replace_span_with(site_span); - if !parser.sess.source_map().is_imported(arm_span) { - e.span_label(arm_span, "in this macro arm"); - } - } else if 
parser.sess.source_map().is_imported(parser.token.span) { - e.span_label(site_span, "in this macro invocation"); - } - match kind { - // Try a statement if an expression is wanted but failed and suggest adding `;` to call. - AstFragmentKind::Expr => match parse_ast_fragment(orig_parser, AstFragmentKind::Stmts) { - Err(err) => err.cancel(), - Ok(_) => { - e.note( - "the macro call doesn't expand to an expression, but it can expand to a statement", - ); - e.span_suggestion_verbose( - site_span.shrink_to_hi(), - "add `;` to interpret the expansion as a statement", - ";", - Applicability::MaybeIncorrect, - ); - } - }, - _ => annotate_err_with_kind(&mut e, kind, site_span), - }; - e.emit(); -} - impl<'a> ParserAnyMacro<'a> { pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment { let ParserAnyMacro { @@ -130,7 +65,7 @@ impl<'a> ParserAnyMacro<'a> { let fragment = match parse_ast_fragment(parser, kind) { Ok(f) => f, Err(err) => { - emit_frag_parse_err(err, parser, snapshot, site_span, arm_span, kind); + diagnostics::emit_frag_parse_err(err, parser, snapshot, site_span, arm_span, kind); return kind.dummy(site_span); } }; @@ -205,8 +140,37 @@ fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span cx_expansions.entry(sp).or_default().push(message); } +pub(super) trait Tracker<'matcher> { + /// This is called before trying to match next MatcherLoc on the current token. + fn before_match_loc(&mut self, parser: &TtParser, matcher: &'matcher MatcherLoc); + + /// This is called after an arm has been parsed, either successfully or unsuccessfully. When this is called, + /// `before_match_loc` was called at least once (with a `MatcherLoc::Eof`). + fn after_arm(&mut self, result: &NamedParseResult); + + /// For tracing. + fn description() -> &'static str; + + fn recovery() -> Recovery; +} + +/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to monomorphization. +pub(super) struct NoopTracker; + +impl<'matcher> Tracker<'matcher> for NoopTracker { + fn before_match_loc(&mut self, _: &TtParser, _: &'matcher MatcherLoc) {} + fn after_arm(&mut self, _: &NamedParseResult) {} + fn description() -> &'static str { + "none" + } + fn recovery() -> Recovery { + Recovery::Forbidden + } +} + /// Expands the rules based macro defined by `lhses` and `rhses` for a given /// input `arg`. +#[instrument(skip(cx, transparency, arg, lhses, rhses))] fn expand_macro<'cx>( cx: &'cx mut ExtCtxt<'_>, sp: Span, @@ -228,9 +192,96 @@ fn expand_macro<'cx>( trace_macros_note(&mut cx.expansions, sp, msg); } - // Which arm's failure should we report? (the one furthest along) - let mut best_failure: Option<(Token, &str)> = None; + // Track nothing for the best performance. 
+ let try_success_result = try_match_macro(sess, name, &arg, lhses, &mut NoopTracker); + + match try_success_result { + Ok((i, named_matches)) => { + let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] { + mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span), + _ => cx.span_bug(sp, "malformed macro rhs"), + }; + let arm_span = rhses[i].span(); + + let rhs_spans = rhs.tts.iter().map(|t| t.span()).collect::<Vec<_>>(); + // rhs has holes ( `$id` and `$(...)` that need filled) + let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) { + Ok(tts) => tts, + Err(mut err) => { + err.emit(); + return DummyResult::any(arm_span); + } + }; + + // Replace all the tokens for the corresponding positions in the macro, to maintain + // proper positions in error reporting, while maintaining the macro_backtrace. + if rhs_spans.len() == tts.len() { + tts = tts.map_enumerated(|i, tt| { + let mut tt = tt.clone(); + let mut sp = rhs_spans[i]; + sp = sp.with_ctxt(tt.span().ctxt()); + tt.set_span(sp); + tt + }); + } + + if cx.trace_macros() { + let msg = format!("to `{}`", pprust::tts_to_string(&tts)); + trace_macros_note(&mut cx.expansions, sp, msg); + } + + let mut p = Parser::new(sess, tts, false, None); + p.last_type_ascription = cx.current_expansion.prior_type_ascription; + + if is_local { + cx.resolver.record_macro_rule_usage(node_id, i); + } + + // Let the context choose how to interpret the result. + // Weird, but useful for X-macros. + return Box::new(ParserAnyMacro { + parser: p, + + // Pass along the original expansion site and the name of the macro + // so we can print a useful error message if the parse of the expanded + // macro leaves unparsed tokens. + site_span: sp, + macro_ident: name, + lint_node_id: cx.current_expansion.lint_node_id, + is_trailing_mac: cx.current_expansion.is_trailing_mac, + arm_span, + is_local, + }); + } + Err(CanRetry::No(_)) => { + debug!("Will not retry matching as an error was emitted already"); + return DummyResult::any(sp); + } + Err(CanRetry::Yes) => { + // Retry and emit a better error below. + } + } + + diagnostics::failed_to_match_macro(cx, sp, def_span, name, arg, lhses) +} + +pub(super) enum CanRetry { + Yes, + /// We are not allowed to retry macro expansion as a fatal error has been emitted already. + No(ErrorGuaranteed), +} +/// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful, +/// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors +/// correctly. +#[instrument(level = "debug", skip(sess, arg, lhses, track), fields(tracking = %T::description()))] +pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>( + sess: &ParseSess, + name: Ident, + arg: &TokenStream, + lhses: &'matcher [Vec<MatcherLoc>], + track: &mut T, +) -> Result<(usize, NamedMatches), CanRetry> { // We create a base parser that can be used for the "black box" parts. // Every iteration needs a fresh copy of that parser. However, the parser // is not mutated on many of the iterations, particularly when dealing with @@ -250,127 +301,53 @@ fn expand_macro<'cx>( // hacky, but speeds up the `html5ever` benchmark significantly. (Issue // 68836 suggests a more comprehensive but more complex change to deal with // this situation.) - // FIXME(Nilstrieb): Stop recovery from happening on this parser and retry later with recovery if the macro failed to match. 
- let parser = parser_from_cx(sess, arg.clone()); - + let parser = parser_from_cx(sess, arg.clone(), T::recovery()); // Try each arm's matchers. let mut tt_parser = TtParser::new(name); for (i, lhs) in lhses.iter().enumerate() { + let _tracing_span = trace_span!("Matching arm", %i); + // Take a snapshot of the state of pre-expansion gating at this point. // This is used so that if a matcher is not `Success(..)`ful, // then the spans which became gated when parsing the unsuccessful matcher // are not recorded. On the first `Success(..)`ful matcher, the spans are merged. let mut gated_spans_snapshot = mem::take(&mut *sess.gated_spans.spans.borrow_mut()); - match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs) { + let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track); + + track.after_arm(&result); + + match result { Success(named_matches) => { + debug!("Parsed arm successfully"); // The matcher was `Success(..)`ful. // Merge the gated spans from parsing the matcher with the pre-existing ones. sess.gated_spans.merge(gated_spans_snapshot); - let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] { - mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span), - _ => cx.span_bug(sp, "malformed macro rhs"), - }; - let arm_span = rhses[i].span(); - - let rhs_spans = rhs.tts.iter().map(|t| t.span()).collect::<Vec<_>>(); - // rhs has holes ( `$id` and `$(...)` that need filled) - let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) { - Ok(tts) => tts, - Err(mut err) => { - err.emit(); - return DummyResult::any(arm_span); - } - }; - - // Replace all the tokens for the corresponding positions in the macro, to maintain - // proper positions in error reporting, while maintaining the macro_backtrace. - if rhs_spans.len() == tts.len() { - tts = tts.map_enumerated(|i, tt| { - let mut tt = tt.clone(); - let mut sp = rhs_spans[i]; - sp = sp.with_ctxt(tt.span().ctxt()); - tt.set_span(sp); - tt - }); - } - - if cx.trace_macros() { - let msg = format!("to `{}`", pprust::tts_to_string(&tts)); - trace_macros_note(&mut cx.expansions, sp, msg); - } - - let mut p = Parser::new(sess, tts, false, None); - p.last_type_ascription = cx.current_expansion.prior_type_ascription; - - if is_local { - cx.resolver.record_macro_rule_usage(node_id, i); - } - - // Let the context choose how to interpret the result. - // Weird, but useful for X-macros. - return Box::new(ParserAnyMacro { - parser: p, - - // Pass along the original expansion site and the name of the macro - // so we can print a useful error message if the parse of the expanded - // macro leaves unparsed tokens. - site_span: sp, - macro_ident: name, - lint_node_id: cx.current_expansion.lint_node_id, - is_trailing_mac: cx.current_expansion.is_trailing_mac, - arm_span, - is_local, - }); + return Ok((i, named_matches)); } - Failure(token, msg) => match best_failure { - Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {} - _ => best_failure = Some((token, msg)), - }, - Error(err_sp, ref msg) => { - let span = err_sp.substitute_dummy(sp); - cx.struct_span_err(span, &msg).emit(); - return DummyResult::any(span); + Failure(_, _) => { + trace!("Failed to match arm, trying the next one"); + // Try the next arm. + } + Error(_, _) => { + debug!("Fatal error occurred during matching"); + // We haven't emitted an error yet, so we can retry. 
+ return Err(CanRetry::Yes); + } + ErrorReported(guarantee) => { + debug!("Fatal error occurred and was reported during matching"); + // An error has been reported already, we cannot retry as that would cause duplicate errors. + return Err(CanRetry::No(guarantee)); } - ErrorReported => return DummyResult::any(sp), } // The matcher was not `Success(..)`ful. // Restore to the state before snapshotting and maybe try again. mem::swap(&mut gated_spans_snapshot, &mut sess.gated_spans.spans.borrow_mut()); } - drop(parser); - - let (token, label) = best_failure.expect("ran no matchers"); - let span = token.span.substitute_dummy(sp); - let mut err = cx.struct_span_err(span, &parse_failure_msg(&token)); - err.span_label(span, label); - if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) { - err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro"); - } - annotate_doc_comment(&mut err, sess.source_map(), span); - // Check whether there's a missing comma in this macro call, like `println!("{}" a);` - if let Some((arg, comma_span)) = arg.add_comma() { - for lhs in lhses { - let parser = parser_from_cx(sess, arg.clone()); - if let Success(_) = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs) { - if comma_span.is_dummy() { - err.note("you might be missing a comma"); - } else { - err.span_suggestion_short( - comma_span, - "missing comma here", - ", ", - Applicability::MachineApplicable, - ); - } - } - } - } - err.emit(); - cx.trace_macros_diag(); - DummyResult::any(sp) + + Err(CanRetry::Yes) } // Note that macro-by-example's input is also matched against a token tree: @@ -406,7 +383,7 @@ pub fn compile_declarative_macro( // Parse the macro_rules! invocation let (macro_rules, body) = match &def.kind { - ast::ItemKind::MacroDef(def) => (def.macro_rules, def.body.inner_tokens()), + ast::ItemKind::MacroDef(def) => (def.macro_rules, def.body.tokens.clone()), _ => unreachable!(), }; @@ -452,28 +429,29 @@ pub fn compile_declarative_macro( let parser = Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS); let mut tt_parser = TtParser::new(Ident::with_dummy_span(if macro_rules { kw::MacroRules } else { kw::Macro })); - let argument_map = match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &argument_gram) { - Success(m) => m, - Failure(token, msg) => { - let s = parse_failure_msg(&token); - let sp = token.span.substitute_dummy(def.span); - let mut err = sess.parse_sess.span_diagnostic.struct_span_err(sp, &s); - err.span_label(sp, msg); - annotate_doc_comment(&mut err, sess.source_map(), sp); - err.emit(); - return dummy_syn_ext(); - } - Error(sp, msg) => { - sess.parse_sess - .span_diagnostic - .struct_span_err(sp.substitute_dummy(def.span), &msg) - .emit(); - return dummy_syn_ext(); - } - ErrorReported => { - return dummy_syn_ext(); - } - }; + let argument_map = + match tt_parser.parse_tt(&mut Cow::Owned(parser), &argument_gram, &mut NoopTracker) { + Success(m) => m, + Failure(token, msg) => { + let s = parse_failure_msg(&token); + let sp = token.span.substitute_dummy(def.span); + let mut err = sess.parse_sess.span_diagnostic.struct_span_err(sp, &s); + err.span_label(sp, msg); + annotate_doc_comment(&mut err, sess.source_map(), sp); + err.emit(); + return dummy_syn_ext(); + } + Error(sp, msg) => { + sess.parse_sess + .span_diagnostic + .struct_span_err(sp.substitute_dummy(def.span), &msg) + .emit(); + return dummy_syn_ext(); + } + ErrorReported(_) => { + return dummy_syn_ext(); + } + }; let mut valid = true; @@ -597,30 +575,6 @@ pub fn 
compile_declarative_macro( (mk_syn_ext(expander), rule_spans) } -#[derive(Subdiagnostic)] -enum ExplainDocComment { - #[label(expand_explain_doc_comment_inner)] - Inner { - #[primary_span] - span: Span, - }, - #[label(expand_explain_doc_comment_outer)] - Outer { - #[primary_span] - span: Span, - }, -} - -fn annotate_doc_comment(err: &mut Diagnostic, sm: &SourceMap, span: Span) { - if let Ok(src) = sm.span_to_snippet(span) { - if src.starts_with("///") || src.starts_with("/**") { - err.subdiagnostic(ExplainDocComment::Outer { span }); - } else if src.starts_with("//!") || src.starts_with("/*!") { - err.subdiagnostic(ExplainDocComment::Inner { span }); - } - } -} - fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. @@ -1405,15 +1359,6 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String { } } -fn parser_from_cx(sess: &ParseSess, tts: TokenStream) -> Parser<'_> { - Parser::new(sess, tts, true, rustc_parse::MACRO_ARGUMENTS) -} - -/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For -/// other tokens, this is "unexpected token...". -fn parse_failure_msg(tok: &Token) -> String { - match tok.kind { - token::Eof => "unexpected end of macro invocation".to_string(), - _ => format!("no rules expected the token `{}`", pprust::token_to_string(tok),), - } +pub(super) fn parser_from_cx(sess: &ParseSess, tts: TokenStream, recovery: Recovery) -> Parser<'_> { + Parser::new(sess, tts, true, rustc_parse::MACRO_ARGUMENTS).recovery(recovery) } diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs index a3c631d33..e49f112bf 100644 --- a/compiler/rustc_expand/src/parse/tests.rs +++ b/compiler/rustc_expand/src/parse/tests.rs @@ -291,7 +291,7 @@ fn ttdelim_span() { .unwrap(); let tts: Vec<_> = match expr.kind { - ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().into_trees().collect(), + ast::ExprKind::MacCall(ref mac) => mac.args.tokens.clone().into_trees().collect(), _ => panic!("not a macro"), }; diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index faaf3b3fe..03bb5c1df 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -1,14 +1,12 @@ use crate::expand::{AstFragment, AstFragmentKind}; - use rustc_ast as ast; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; +use rustc_data_structures::fx::FxHashMap; use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::Ident; - use smallvec::{smallvec, SmallVec}; - -use rustc_data_structures::fx::FxHashMap; +use thin_vec::ThinVec; pub fn placeholder( kind: AstFragmentKind, @@ -17,8 +15,12 @@ pub fn placeholder( ) -> AstFragment { fn mac_placeholder() -> P<ast::MacCall> { P(ast::MacCall { - path: ast::Path { span: DUMMY_SP, segments: Vec::new(), tokens: None }, - args: P(ast::MacArgs::Empty), + path: ast::Path { span: DUMMY_SP, segments: ThinVec::new(), tokens: None }, + args: P(ast::DelimArgs { + dspan: ast::tokenstream::DelimSpan::dummy(), + delim: ast::MacDelimiter::Parenthesis, + tokens: ast::tokenstream::TokenStream::new(Vec::new()), + }), prior_type_ascription: None, }) } diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs index 1a2ab9d19..e9a691920 100644 --- a/compiler/rustc_expand/src/proc_macro.rs +++ 
b/compiler/rustc_expand/src/proc_macro.rs @@ -112,6 +112,7 @@ impl MultiItemModifier for DeriveProcMacro { span: Span, _meta_item: &ast::MetaItem, item: Annotatable, + _is_derive_const: bool, ) -> ExpandResult<Vec<Annotatable>, Annotatable> { // We need special handling for statement items // (e.g. `fn foo() { #[derive(Debug)] struct Bar; }`) diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index cc2858d3f..761657961 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -516,26 +516,27 @@ impl server::TokenStream for Rustc<'_, '_> { // We don't use `TokenStream::from_ast` as the tokenstream currently cannot // be recovered in the general case. match &expr.kind { - ast::ExprKind::Lit(l) if l.token_lit.kind == token::Bool => { + ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => { Ok(tokenstream::TokenStream::token_alone( - token::Ident(l.token_lit.symbol, false), - l.span, + token::Ident(token_lit.symbol, false), + expr.span, )) } - ast::ExprKind::Lit(l) => { - Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token_lit), l.span)) + ast::ExprKind::Lit(token_lit) => { + Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span)) + } + ast::ExprKind::IncludedBytes(bytes) => { + let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit(); + Ok(tokenstream::TokenStream::token_alone(token::TokenKind::Literal(lit), expr.span)) } ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind { - ast::ExprKind::Lit(l) => match l.token_lit { + ast::ExprKind::Lit(token_lit) => match token_lit { token::Lit { kind: token::Integer | token::Float, .. } => { Ok(Self::TokenStream::from_iter([ // FIXME: The span of the `-` token is lost when // parsing, so we cannot faithfully recover it here. 
tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span), - tokenstream::TokenTree::token_alone( - token::Literal(l.token_lit), - l.span, - ), + tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span), ])) } _ => Err(()), diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs index e44f06081..539b04535 100644 --- a/compiler/rustc_expand/src/tests.rs +++ b/compiler/rustc_expand/src/tests.rs @@ -151,6 +151,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: & false, None, false, + false, ); let handler = Handler::with_emitter(true, None, Box::new(emitter)); handler.span_err(msp, "foo"); @@ -271,13 +272,13 @@ error: foo --> test.rs:3:3 | 3 | X0 Y0 - | ____^__- - | | ___| + | ___^__- + | |___| | || 4 | || X1 Y1 5 | || X2 Y2 | ||____^__- `Y` is a good letter too - | |____| + | |_____| | `X` is a good letter "#, @@ -310,12 +311,12 @@ error: foo --> test.rs:3:3 | 3 | X0 Y0 - | ____^__- - | | ___| + | ___^__- + | |___| | || 4 | || Y1 X1 | ||____-__^ `X` is a good letter - | |_____| + | |____| | `Y` is a good letter too "#, @@ -350,13 +351,13 @@ error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 - | ______^ -4 | | X1 Y1 Z1 - | |_________- + | _______^ +4 | | X1 Y1 Z1 + | | _________- 5 | || X2 Y2 Z2 | ||____^ `X` is a good letter -6 | | X3 Y3 Z3 - | |_____- `Y` is a good letter too +6 | | X3 Y3 Z3 + | |____- `Y` is a good letter too "#, ); @@ -394,15 +395,15 @@ error: foo --> test.rs:3:3 | 3 | X0 Y0 Z0 - | _____^__-__- - | | ____|__| - | || ___| + | ___^__-__- + | |___|__| + | ||___| | ||| 4 | ||| X1 Y1 Z1 5 | ||| X2 Y2 Z2 | |||____^__-__- `Z` label - | ||____|__| - | |____| `Y` is a good letter too + | ||_____|__| + | |______| `Y` is a good letter too | `X` is a good letter "#, @@ -486,17 +487,17 @@ error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 - | ______^ -4 | | X1 Y1 Z1 - | |____^_- + | _______^ +4 | | X1 Y1 Z1 + | | ____^_- | ||____| - | | `X` is a good letter -5 | | X2 Y2 Z2 - | |____-______- `Y` is a good letter too - | ____| - | | -6 | | X3 Y3 Z3 - | |________- `Z` + | | `X` is a good letter +5 | | X2 Y2 Z2 + | |___-______- `Y` is a good letter too + | ___| + | | +6 | | X3 Y3 Z3 + | |_______- `Z` "#, ); @@ -569,14 +570,14 @@ error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 - | ______^ -4 | | X1 Y1 Z1 - | |____^____- + | _______^ +4 | | X1 Y1 Z1 + | | ____^____- | ||____| - | | `X` is a good letter -5 | | X2 Y2 Z2 -6 | | X3 Y3 Z3 - | |___________- `Y` is a good letter too + | | `X` is a good letter +5 | | X2 Y2 Z2 +6 | | X3 Y3 Z3 + | |__________- `Y` is a good letter too "#, ); @@ -940,18 +941,18 @@ error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 - | ______^ -4 | | X1 Y1 Z1 - | |____^____- + | _______^ +4 | | X1 Y1 Z1 + | | ____^____- | ||____| - | | `X` is a good letter -5 | | 1 -6 | | 2 -7 | | 3 -... | -15 | | X2 Y2 Z2 -16 | | X3 Y3 Z3 - | |___________- `Y` is a good letter too + | | `X` is a good letter +5 | | 1 +6 | | 2 +7 | | 3 +... | +15 | | X2 Y2 Z2 +16 | | X3 Y3 Z3 + | |__________- `Y` is a good letter too "#, ); @@ -995,21 +996,21 @@ error: foo --> test.rs:3:6 | 3 | X0 Y0 Z0 - | ______^ -4 | | 1 -5 | | 2 -6 | | 3 -7 | | X1 Y1 Z1 - | |_________- + | _______^ +4 | | 1 +5 | | 2 +6 | | 3 +7 | | X1 Y1 Z1 + | | _________- 8 | || 4 9 | || 5 10 | || 6 11 | || X2 Y2 Z2 | ||__________- `Z` is a good letter too -... | -15 | | 10 -16 | | X3 Y3 Z3 - | |_______^ `Y` is a good letter +... | +15 | | 10 +16 | | X3 Y3 Z3 + | |________^ `Y` is a good letter "#, ); |
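
A smaller recurring theme above is literal handling: `ast::ExprKind::Lit` now carries the raw token literal, and consumers such as `expr_to_spanned_string` convert it with `ast::LitKind::from_token_lit` at the point of use, reporting failures via `report_lit_error`. Below is a rough, self-contained sketch of that "validate lazily where the literal is consumed" shape; the types are made-up stand-ins, not the rustc_ast definitions.

// Toy model of deferring literal validation to the point of use. `TokenLit`,
// `LitKind`, and `LitError` are illustrative only.

#[derive(Debug)]
struct TokenLit {
    /// Raw text as it appeared in the source, e.g. "\"abc\"" or "42".
    symbol: String,
}

#[derive(Debug)]
enum LitKind {
    Str(String),
    Int(u128),
}

#[derive(Debug)]
enum LitError {
    Malformed(String),
}

impl TokenLit {
    /// Structured interpretation, computed only when a consumer needs it.
    fn to_lit_kind(&self) -> Result<LitKind, LitError> {
        if let Some(body) = self.symbol.strip_prefix('"').and_then(|s| s.strip_suffix('"')) {
            Ok(LitKind::Str(body.to_string()))
        } else if let Ok(n) = self.symbol.parse::<u128>() {
            Ok(LitKind::Int(n))
        } else {
            Err(LitError::Malformed(self.symbol.clone()))
        }
    }
}

/// A consumer that only accepts string literals and surfaces conversion
/// errors instead of silently accepting malformed input.
fn expect_str(lit: &TokenLit) -> Result<String, String> {
    match lit.to_lit_kind() {
        Ok(LitKind::Str(s)) => Ok(s),
        Ok(other) => Err(format!("expected a string literal, found `{other:?}`")),
        Err(e) => Err(format!("malformed literal: {e:?}")),
    }
}

fn main() {
    let ok = TokenLit { symbol: "\"hello\"".to_string() };
    let bad = TokenLit { symbol: "0b_xyz".to_string() };
    assert_eq!(expect_str(&ok).unwrap(), "hello");
    println!("{}", expect_str(&bad).unwrap_err());
}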