path: root/compiler/rustc_expand
author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:19:03 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:19:03 +0000
commit     64d98f8ee037282c35007b64c2649055c56af1db (patch)
tree       5492bcf97fce41ee1c0b1cc2add283f3e66cdab0 /compiler/rustc_expand
parent     Adding debian version 1.67.1+dfsg1-1. (diff)
download   rustc-64d98f8ee037282c35007b64c2649055c56af1db.tar.xz
           rustc-64d98f8ee037282c35007b64c2649055c56af1db.zip
Merging upstream version 1.68.2+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_expand')
-rw-r--r--  compiler/rustc_expand/src/base.rs              | 133
-rw-r--r--  compiler/rustc_expand/src/build.rs             |  50
-rw-r--r--  compiler/rustc_expand/src/config.rs            | 108
-rw-r--r--  compiler/rustc_expand/src/errors.rs            | 326
-rw-r--r--  compiler/rustc_expand/src/expand.rs            | 104
-rw-r--r--  compiler/rustc_expand/src/lib.rs               |   6
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs   |  67
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_check.rs   |  14
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs  |  37
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs   | 110
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs        |   6
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs    |  32
-rw-r--r--  compiler/rustc_expand/src/module.rs            |  80
-rw-r--r--  compiler/rustc_expand/src/parse/tests.rs       |  19
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs |   9
-rw-r--r--  compiler/rustc_expand/src/tests.rs             |   1
16 files changed, 727 insertions, 375 deletions
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 9d6a4f9a1..951d59246 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -1,3 +1,11 @@
+#![deny(rustc::untranslatable_diagnostic)]
+
+use crate::errors::{
+ ArgumentNotAttributes, AttrNoArguments, AttributeMetaItem, AttributeSingleWord,
+ AttributesWrongForm, CannotBeNameOfMacro, ExpectedCommaInList, HelperAttributeNameInvalid,
+ MacroBodyStability, MacroConstStability, NotAMetaItem, OnlyOneArgument, OnlyOneWord,
+ ResolveRelativePath, TakesNoArguments,
+};
use crate::expand::{self, AstFragment, Invocation};
use crate::module::DirOwnership;
@@ -23,12 +31,11 @@ use rustc_span::edition::Edition;
use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{BytePos, FileName, RealFileName, Span, DUMMY_SP};
+use rustc_span::{BytePos, FileName, Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};
-use std::default::Default;
use std::iter;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
pub(crate) use rustc_span::hygiene::MacroKind;
@@ -56,21 +63,21 @@ pub enum Annotatable {
impl Annotatable {
pub fn span(&self) -> Span {
- match *self {
- Annotatable::Item(ref item) => item.span,
- Annotatable::TraitItem(ref trait_item) => trait_item.span,
- Annotatable::ImplItem(ref impl_item) => impl_item.span,
- Annotatable::ForeignItem(ref foreign_item) => foreign_item.span,
- Annotatable::Stmt(ref stmt) => stmt.span,
- Annotatable::Expr(ref expr) => expr.span,
- Annotatable::Arm(ref arm) => arm.span,
- Annotatable::ExprField(ref field) => field.span,
- Annotatable::PatField(ref fp) => fp.pat.span,
- Annotatable::GenericParam(ref gp) => gp.ident.span,
- Annotatable::Param(ref p) => p.span,
- Annotatable::FieldDef(ref sf) => sf.span,
- Annotatable::Variant(ref v) => v.span,
- Annotatable::Crate(ref c) => c.spans.inner_span,
+ match self {
+ Annotatable::Item(item) => item.span,
+ Annotatable::TraitItem(trait_item) => trait_item.span,
+ Annotatable::ImplItem(impl_item) => impl_item.span,
+ Annotatable::ForeignItem(foreign_item) => foreign_item.span,
+ Annotatable::Stmt(stmt) => stmt.span,
+ Annotatable::Expr(expr) => expr.span,
+ Annotatable::Arm(arm) => arm.span,
+ Annotatable::ExprField(field) => field.span,
+ Annotatable::PatField(fp) => fp.pat.span,
+ Annotatable::GenericParam(gp) => gp.ident.span,
+ Annotatable::Param(p) => p.span,
+ Annotatable::FieldDef(sf) => sf.span,
+ Annotatable::Variant(v) => v.span,
+ Annotatable::Crate(c) => c.spans.inner_span,
}
}
@@ -789,26 +796,16 @@ impl SyntaxExtension {
.unwrap_or_else(|| (None, helper_attrs));
let (stability, const_stability, body_stability) = attr::find_stability(&sess, attrs, span);
if let Some((_, sp)) = const_stability {
- sess.parse_sess
- .span_diagnostic
- .struct_span_err(sp, "macros cannot have const stability attributes")
- .span_label(sp, "invalid const stability attribute")
- .span_label(
- sess.source_map().guess_head_span(span),
- "const stability attribute affects this macro",
- )
- .emit();
+ sess.emit_err(MacroConstStability {
+ span: sp,
+ head_span: sess.source_map().guess_head_span(span),
+ });
}
if let Some((_, sp)) = body_stability {
- sess.parse_sess
- .span_diagnostic
- .struct_span_err(sp, "macros cannot have body stability attributes")
- .span_label(sp, "invalid body stability attribute")
- .span_label(
- sess.source_map().guess_head_span(span),
- "body stability attribute affects this macro",
- )
- .emit();
+ sess.emit_err(MacroBodyStability {
+ span: sp,
+ head_span: sess.source_map().guess_head_span(span),
+ });
}
SyntaxExtension {
@@ -1200,13 +1197,11 @@ pub fn resolve_path(
.expect("attempting to resolve a file path in an external file"),
FileName::DocTest(path, _) => path,
other => {
- return Err(parse_sess.span_diagnostic.struct_span_err(
+ return Err(ResolveRelativePath {
span,
- &format!(
- "cannot resolve relative path in non-file source `{}`",
- parse_sess.source_map().filename_for_diagnostics(&other)
- ),
- ));
+ path: parse_sess.source_map().filename_for_diagnostics(&other).to_string(),
+ }
+ .into_diagnostic(&parse_sess.span_diagnostic));
}
};
result.pop();
@@ -1222,6 +1217,8 @@ pub fn resolve_path(
/// The returned bool indicates whether an applicable suggestion has already been
/// added to the diagnostic to avoid emitting multiple suggestions. `Err(None)`
/// indicates that an ast error was encountered.
+// FIXME(Nilstrieb) Make this function setup translatable
+#[allow(rustc::untranslatable_diagnostic)]
pub fn expr_to_spanned_string<'a>(
cx: &'a mut ExtCtxt<'_>,
expr: P<ast::Expr>,
@@ -1234,7 +1231,7 @@ pub fn expr_to_spanned_string<'a>(
Err(match expr.kind {
ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
Ok(ast::LitKind::Str(s, style)) => return Ok((s, style, expr.span)),
- Ok(ast::LitKind::ByteStr(_)) => {
+ Ok(ast::LitKind::ByteStr(..)) => {
let mut err = cx.struct_span_err(expr.span, err_msg);
let span = expr.span.shrink_to_lo();
err.span_suggestion(
@@ -1280,9 +1277,9 @@ pub fn expr_to_string(
/// compilation should call
/// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
/// done as rarely as possible).
-pub fn check_zero_tts(cx: &ExtCtxt<'_>, sp: Span, tts: TokenStream, name: &str) {
+pub fn check_zero_tts(cx: &ExtCtxt<'_>, span: Span, tts: TokenStream, name: &str) {
if !tts.is_empty() {
- cx.span_err(sp, &format!("{} takes no arguments", name));
+ cx.emit_err(TakesNoArguments { span, name });
}
}
@@ -1304,31 +1301,27 @@ pub fn parse_expr(p: &mut parser::Parser<'_>) -> Option<P<ast::Expr>> {
/// expect exactly one string literal, or emit an error and return `None`.
pub fn get_single_str_from_tts(
cx: &mut ExtCtxt<'_>,
- sp: Span,
+ span: Span,
tts: TokenStream,
name: &str,
) -> Option<Symbol> {
let mut p = cx.new_parser_from_tts(tts);
if p.token == token::Eof {
- cx.span_err(sp, &format!("{} takes 1 argument", name));
+ cx.emit_err(OnlyOneArgument { span, name });
return None;
}
let ret = parse_expr(&mut p)?;
let _ = p.eat(&token::Comma);
if p.token != token::Eof {
- cx.span_err(sp, &format!("{} takes 1 argument", name));
+ cx.emit_err(OnlyOneArgument { span, name });
}
expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| s)
}
/// Extracts comma-separated expressions from `tts`.
/// On error, emit it, and return `None`.
-pub fn get_exprs_from_tts(
- cx: &mut ExtCtxt<'_>,
- sp: Span,
- tts: TokenStream,
-) -> Option<Vec<P<ast::Expr>>> {
+pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>, tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
let mut p = cx.new_parser_from_tts(tts);
let mut es = Vec::new();
while p.token != token::Eof {
@@ -1343,7 +1336,7 @@ pub fn get_exprs_from_tts(
continue;
}
if p.token != token::Eof {
- cx.span_err(sp, "expected token: `,`");
+ cx.emit_err(ExpectedCommaInList { span: p.token.span });
return None;
}
}
@@ -1353,64 +1346,58 @@ pub fn get_exprs_from_tts(
pub fn parse_macro_name_and_helper_attrs(
diag: &rustc_errors::Handler,
attr: &Attribute,
- descr: &str,
+ macro_type: &str,
) -> Option<(Symbol, Vec<Symbol>)> {
// Once we've located the `#[proc_macro_derive]` attribute, verify
// that it's of the form `#[proc_macro_derive(Foo)]` or
// `#[proc_macro_derive(Foo, attributes(A, ..))]`
let list = attr.meta_item_list()?;
if list.len() != 1 && list.len() != 2 {
- diag.span_err(attr.span, "attribute must have either one or two arguments");
+ diag.emit_err(AttrNoArguments { span: attr.span });
return None;
}
let Some(trait_attr) = list[0].meta_item() else {
- diag.span_err(list[0].span(), "not a meta item");
+ diag.emit_err(NotAMetaItem {span: list[0].span()});
return None;
};
let trait_ident = match trait_attr.ident() {
Some(trait_ident) if trait_attr.is_word() => trait_ident,
_ => {
- diag.span_err(trait_attr.span, "must only be one word");
+ diag.emit_err(OnlyOneWord { span: trait_attr.span });
return None;
}
};
if !trait_ident.name.can_be_raw() {
- diag.span_err(
- trait_attr.span,
- &format!("`{}` cannot be a name of {} macro", trait_ident, descr),
- );
+ diag.emit_err(CannotBeNameOfMacro { span: trait_attr.span, trait_ident, macro_type });
}
let attributes_attr = list.get(1);
let proc_attrs: Vec<_> = if let Some(attr) = attributes_attr {
if !attr.has_name(sym::attributes) {
- diag.span_err(attr.span(), "second argument must be `attributes`");
+ diag.emit_err(ArgumentNotAttributes { span: attr.span() });
}
attr.meta_item_list()
.unwrap_or_else(|| {
- diag.span_err(attr.span(), "attribute must be of form: `attributes(foo, bar)`");
+ diag.emit_err(AttributesWrongForm { span: attr.span() });
&[]
})
.iter()
.filter_map(|attr| {
let Some(attr) = attr.meta_item() else {
- diag.span_err(attr.span(), "not a meta item");
+ diag.emit_err(AttributeMetaItem { span: attr.span() });
return None;
};
let ident = match attr.ident() {
Some(ident) if attr.is_word() => ident,
_ => {
- diag.span_err(attr.span, "must only be one word");
+ diag.emit_err(AttributeSingleWord { span: attr.span });
return None;
}
};
if !ident.name.can_be_raw() {
- diag.span_err(
- attr.span,
- &format!("`{}` cannot be a name of derive helper attribute", ident),
- );
+ diag.emit_err(HelperAttributeNameInvalid { span: attr.span, name: ident });
}
Some(ident.name)
@@ -1436,8 +1423,10 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool {
if let [variant] = &*enum_def.variants {
if variant.ident.name == sym::Input {
let filename = sess.source_map().span_to_filename(item.ident.span);
- if let FileName::Real(RealFileName::LocalPath(path)) = filename {
- if let Some(c) = path
+ if let FileName::Real(real) = filename {
+ if let Some(c) = real
+ .local_path()
+ .unwrap_or(Path::new(""))
.components()
.flat_map(|c| c.as_os_str().to_str())
.find(|c| c.starts_with("rental") || c.starts_with("allsorts-rental"))
diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs
index 4812bdd9d..9b16e79d4 100644
--- a/compiler/rustc_expand/src/build.rs
+++ b/compiler/rustc_expand/src/build.rs
@@ -1,8 +1,7 @@
use crate::base::ExtCtxt;
-use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, AttrVec, BlockCheckMode, Expr, LocalKind, PatKind, UnOp};
-use rustc_data_structures::sync::Lrc;
+use rustc_ast::{attr, token, util::literal};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
@@ -88,14 +87,14 @@ impl<'a> ExtCtxt<'a> {
self.anon_const(span, ast::ExprKind::Path(None, self.path_ident(span, ident)))
}
- pub fn ty_rptr(
+ pub fn ty_ref(
&self,
span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability,
) -> P<ast::Ty> {
- self.ty(span, ast::TyKind::Rptr(lifetime, self.ty_mt(ty, mutbl)))
+ self.ty(span, ast::TyKind::Ref(lifetime, self.ty_mt(ty, mutbl)))
}
pub fn ty_ptr(&self, span: Span, ty: P<ast::Ty>, mutbl: ast::Mutability) -> P<ast::Ty> {
@@ -332,36 +331,36 @@ impl<'a> ExtCtxt<'a> {
self.expr_struct(span, self.path_ident(span, id), fields)
}
- fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
- let token_lit = lit_kind.to_token_lit();
- self.expr(span, ast::ExprKind::Lit(token_lit))
+ pub fn expr_usize(&self, span: Span, n: usize) -> P<ast::Expr> {
+ let suffix = Some(ast::UintTy::Usize.name());
+ let lit = token::Lit::new(token::Integer, sym::integer(n), suffix);
+ self.expr(span, ast::ExprKind::Lit(lit))
}
- pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
- self.expr_lit(
- span,
- ast::LitKind::Int(i as u128, ast::LitIntType::Unsigned(ast::UintTy::Usize)),
- )
- }
-
- pub fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> {
- self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U32)))
+ pub fn expr_u32(&self, span: Span, n: u32) -> P<ast::Expr> {
+ let suffix = Some(ast::UintTy::U32.name());
+ let lit = token::Lit::new(token::Integer, sym::integer(n), suffix);
+ self.expr(span, ast::ExprKind::Lit(lit))
}
- pub fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
- self.expr_lit(sp, ast::LitKind::Bool(value))
+ pub fn expr_bool(&self, span: Span, value: bool) -> P<ast::Expr> {
+ let lit = token::Lit::new(token::Bool, if value { kw::True } else { kw::False }, None);
+ self.expr(span, ast::ExprKind::Lit(lit))
}
- pub fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
- self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
+ pub fn expr_str(&self, span: Span, s: Symbol) -> P<ast::Expr> {
+ let lit = token::Lit::new(token::Str, literal::escape_string_symbol(s), None);
+ self.expr(span, ast::ExprKind::Lit(lit))
}
- pub fn expr_char(&self, sp: Span, ch: char) -> P<ast::Expr> {
- self.expr_lit(sp, ast::LitKind::Char(ch))
+ pub fn expr_char(&self, span: Span, ch: char) -> P<ast::Expr> {
+ let lit = token::Lit::new(token::Char, literal::escape_char_symbol(ch), None);
+ self.expr(span, ast::ExprKind::Lit(lit))
}
- pub fn expr_byte_str(&self, sp: Span, bytes: Vec<u8>) -> P<ast::Expr> {
- self.expr_lit(sp, ast::LitKind::ByteStr(Lrc::from(bytes)))
+ pub fn expr_byte_str(&self, span: Span, bytes: Vec<u8>) -> P<ast::Expr> {
+ let lit = token::Lit::new(token::ByteStr, literal::escape_byte_str_symbol(&bytes), None);
+ self.expr(span, ast::ExprKind::Lit(lit))
}
/// `[expr1, expr2, ...]`
@@ -534,6 +533,7 @@ impl<'a> ExtCtxt<'a> {
ast::ExprKind::Closure(Box::new(ast::Closure {
binder: ast::ClosureBinder::NotPresent,
capture_clause: ast::CaptureBy::Ref,
+ constness: ast::Const::No,
asyncness: ast::Async::No,
movability: ast::Movability::Movable,
fn_decl,
@@ -627,7 +627,7 @@ impl<'a> ExtCtxt<'a> {
// Builds `#[name = val]`.
//
- // Note: `span` is used for both the identifer and the value.
+ // Note: `span` is used for both the identifier and the value.
pub fn attr_name_value_str(&self, name: Symbol, val: Symbol, span: Span) -> ast::Attribute {
let g = &self.sess.parse_sess.attr_id_generator;
attr::mk_attr_name_value_str(g, ast::AttrStyle::Outer, name, val, span)
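
A minimal usage sketch of the reworked builders above, assuming a caller inside a built-in derive where `cx: &ExtCtxt<'_>`, `span`, `elem_ty`, and `lt` are already in scope (illustrative only, not part of the patch):

    // `ty_ref` is the renamed `ty_rptr`; literal builders now construct token::Lit directly.
    let ref_ty = cx.ty_ref(span, elem_ty, Some(lt), ast::Mutability::Not);
    let s = cx.expr_str(span, Symbol::intern("hello")); // token::Str with an escaped symbol
    let n = cx.expr_u32(span, 42);                      // integer literal with a `u32` suffix
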
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 2510795c2..1fcbdfd9b 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -1,5 +1,9 @@
//! Conditional compilation stripping.
+use crate::errors::{
+ FeatureIncludedInEdition, FeatureNotAllowed, FeatureRemoved, FeatureRemovedReason, InvalidCfg,
+ MalformedFeatureAttribute, MalformedFeatureAttributeHelp, RemoveExprNotSupported,
+};
use rustc_ast::ptr::P;
use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree};
@@ -10,7 +14,6 @@ use rustc_ast::{self as ast, AttrStyle, Attribute, HasAttrs, HasTokens, MetaItem
use rustc_attr as attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::map_in_place::MapInPlace;
-use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
use rustc_feature::{Feature, Features, State as FeatureState};
use rustc_feature::{
ACCEPTED_FEATURES, ACTIVE_FEATURES, REMOVED_FEATURES, STABLE_REMOVED_FEATURES,
@@ -33,18 +36,12 @@ pub struct StripUnconfigured<'a> {
pub lint_node_id: NodeId,
}
-fn get_features(
- sess: &Session,
- span_handler: &Handler,
- krate_attrs: &[ast::Attribute],
-) -> Features {
- fn feature_removed(span_handler: &Handler, span: Span, reason: Option<&str>) {
- let mut err = struct_span_err!(span_handler, span, E0557, "feature has been removed");
- err.span_label(span, "feature has been removed");
- if let Some(reason) = reason {
- err.note(reason);
- }
- err.emit();
+fn get_features(sess: &Session, krate_attrs: &[ast::Attribute]) -> Features {
+ fn feature_removed(sess: &Session, span: Span, reason: Option<&str>) {
+ sess.emit_err(FeatureRemoved {
+ span,
+ reason: reason.map(|reason| FeatureRemovedReason { reason }),
+ });
}
fn active_features_up_to(edition: Edition) -> impl Iterator<Item = &'static Feature> {
@@ -117,34 +114,34 @@ fn get_features(
continue;
};
- let bad_input = |span| {
- struct_span_err!(span_handler, span, E0556, "malformed `feature` attribute input")
- };
-
for mi in list {
let name = match mi.ident() {
Some(ident) if mi.is_word() => ident.name,
Some(ident) => {
- bad_input(mi.span())
- .span_suggestion(
- mi.span(),
- "expected just one word",
- ident.name,
- Applicability::MaybeIncorrect,
- )
- .emit();
+ sess.emit_err(MalformedFeatureAttribute {
+ span: mi.span(),
+ help: MalformedFeatureAttributeHelp::Suggestion {
+ span: mi.span(),
+ suggestion: ident.name,
+ },
+ });
continue;
}
None => {
- bad_input(mi.span()).span_label(mi.span(), "expected just one word").emit();
+ sess.emit_err(MalformedFeatureAttribute {
+ span: mi.span(),
+ help: MalformedFeatureAttributeHelp::Label { span: mi.span() },
+ });
continue;
}
};
- if let Some(edition) = edition_enabled_features.get(&name) {
- let msg =
- &format!("the feature `{}` is included in the Rust {} edition", name, edition);
- span_handler.struct_span_warn_with_code(mi.span(), msg, error_code!(E0705)).emit();
+ if let Some(&edition) = edition_enabled_features.get(&name) {
+ sess.emit_warning(FeatureIncludedInEdition {
+ span: mi.span(),
+ feature: name,
+ edition,
+ });
continue;
}
@@ -159,7 +156,7 @@ fn get_features(
if let FeatureState::Removed { reason } | FeatureState::Stabilized { reason } =
state
{
- feature_removed(span_handler, mi.span(), *reason);
+ feature_removed(sess, mi.span(), *reason);
continue;
}
}
@@ -173,14 +170,7 @@ fn get_features(
if let Some(allowed) = sess.opts.unstable_opts.allow_features.as_ref() {
if allowed.iter().all(|f| name.as_str() != f) {
- struct_span_err!(
- span_handler,
- mi.span(),
- E0725,
- "the feature `{}` is not in the list of allowed features",
- name
- )
- .emit();
+ sess.emit_err(FeatureNotAllowed { span: mi.span(), name });
continue;
}
}
@@ -221,7 +211,7 @@ pub fn features(
}
Some(attrs) => {
krate.attrs = attrs;
- let features = get_features(sess, diag, &krate.attrs);
+ let features = get_features(sess, &krate.attrs);
if err_count == diag.err_count() {
// Avoid reconfiguring malformed `cfg_attr`s.
strip_unconfigured.features = Some(&features);
@@ -308,7 +298,7 @@ impl<'a> StripUnconfigured<'a> {
Some(AttrTokenTree::Delimited(sp, delim, inner))
.into_iter()
}
- AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => {
+ AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(nt) = &token.kind => {
panic!(
"Nonterminal should have been flattened at {:?}: {:?}",
token.span, nt
@@ -503,8 +493,7 @@ impl<'a> StripUnconfigured<'a> {
// N.B., this is intentionally not part of the visit_expr() function
// in order for filter_map_expr() to be able to avoid this check
if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(*a)) {
- let msg = "removing an expression is not supported in this position";
- self.sess.parse_sess.span_diagnostic.span_err(attr.span, msg);
+ self.sess.emit_err(RemoveExprNotSupported { span: attr.span });
}
self.process_cfg_attrs(expr);
@@ -513,27 +502,26 @@ impl<'a> StripUnconfigured<'a> {
}
pub fn parse_cfg<'a>(meta_item: &'a MetaItem, sess: &Session) -> Option<&'a MetaItem> {
- let error = |span, msg, suggestion: &str| {
- let mut err = sess.parse_sess.span_diagnostic.struct_span_err(span, msg);
- if !suggestion.is_empty() {
- err.span_suggestion(
- span,
- "expected syntax is",
- suggestion,
- Applicability::HasPlaceholders,
- );
- }
- err.emit();
- None
- };
let span = meta_item.span;
match meta_item.meta_item_list() {
- None => error(span, "`cfg` is not followed by parentheses", "cfg(/* predicate */)"),
- Some([]) => error(span, "`cfg` predicate is not specified", ""),
- Some([_, .., l]) => error(l.span(), "multiple `cfg` predicates are specified", ""),
+ None => {
+ sess.emit_err(InvalidCfg::NotFollowedByParens { span });
+ None
+ }
+ Some([]) => {
+ sess.emit_err(InvalidCfg::NoPredicate { span });
+ None
+ }
+ Some([_, .., l]) => {
+ sess.emit_err(InvalidCfg::MultiplePredicates { span: l.span() });
+ None
+ }
Some([single]) => match single.meta_item() {
Some(meta_item) => Some(meta_item),
- None => error(single.span(), "`cfg` predicate key cannot be a literal", ""),
+ None => {
+ sess.emit_err(InvalidCfg::PredicateLiteral { span: single.span() });
+ None
+ }
},
}
}
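
The `InvalidCfg` variants used above correspond to the malformed attribute shapes that the replaced string messages described. Illustrative user inputs for each variant (hypothetical code, not part of the patch):

    #[cfg]                 // InvalidCfg::NotFollowedByParens, suggests `cfg(/* predicate */)`
    #[cfg()]               // InvalidCfg::NoPredicate
    #[cfg(unix, windows)]  // InvalidCfg::MultiplePredicates (reported on the last predicate)
    #[cfg("linux")]        // InvalidCfg::PredicateLiteral
    fn example() {}
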
diff --git a/compiler/rustc_expand/src/errors.rs b/compiler/rustc_expand/src/errors.rs
index d383f4832..afe5169d3 100644
--- a/compiler/rustc_expand/src/errors.rs
+++ b/compiler/rustc_expand/src/errors.rs
@@ -1,6 +1,10 @@
+use rustc_ast::ast;
use rustc_macros::Diagnostic;
-use rustc_span::symbol::MacroRulesNormalizedIdent;
-use rustc_span::Span;
+use rustc_session::Limit;
+use rustc_span::edition::Edition;
+use rustc_span::symbol::{Ident, MacroRulesNormalizedIdent};
+use rustc_span::{Span, Symbol};
+use std::borrow::Cow;
#[derive(Diagnostic)]
#[diag(expand_expr_repeat_no_syntax_vars)]
@@ -46,3 +50,321 @@ pub(crate) struct MetaVarsDifSeqMatchers {
pub span: Span,
pub msg: String,
}
+
+#[derive(Diagnostic)]
+#[diag(expand_resolve_relative_path)]
+pub(crate) struct ResolveRelativePath {
+ #[primary_span]
+ pub span: Span,
+ pub path: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_macro_const_stability)]
+pub(crate) struct MacroConstStability {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[label(label2)]
+ pub head_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_macro_body_stability)]
+pub(crate) struct MacroBodyStability {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[label(label2)]
+ pub head_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_attr_no_arguments)]
+pub(crate) struct AttrNoArguments {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_not_a_meta_item)]
+pub(crate) struct NotAMetaItem {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_only_one_word)]
+pub(crate) struct OnlyOneWord {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_cannot_be_name_of_macro)]
+pub(crate) struct CannotBeNameOfMacro<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub trait_ident: Ident,
+ pub macro_type: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_arg_not_attributes)]
+pub(crate) struct ArgumentNotAttributes {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_attributes_wrong_form)]
+pub(crate) struct AttributesWrongForm {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_attribute_meta_item)]
+pub(crate) struct AttributeMetaItem {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_attribute_single_word)]
+pub(crate) struct AttributeSingleWord {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_helper_attribute_name_invalid)]
+pub(crate) struct HelperAttributeNameInvalid {
+ #[primary_span]
+ pub span: Span,
+ pub name: Ident,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_expected_comma_in_list)]
+pub(crate) struct ExpectedCommaInList {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_only_one_argument)]
+pub(crate) struct OnlyOneArgument<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_takes_no_arguments)]
+pub(crate) struct TakesNoArguments<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_feature_included_in_edition, code = "E0705")]
+pub(crate) struct FeatureIncludedInEdition {
+ #[primary_span]
+ pub span: Span,
+ pub feature: Symbol,
+ pub edition: Edition,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_feature_removed, code = "E0557")]
+pub(crate) struct FeatureRemoved<'a> {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ #[subdiagnostic]
+ pub reason: Option<FeatureRemovedReason<'a>>,
+}
+
+#[derive(Subdiagnostic)]
+#[note(reason)]
+pub(crate) struct FeatureRemovedReason<'a> {
+ pub reason: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_feature_not_allowed, code = "E0725")]
+pub(crate) struct FeatureNotAllowed {
+ #[primary_span]
+ pub span: Span,
+ pub name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_recursion_limit_reached)]
+#[help]
+pub(crate) struct RecursionLimitReached<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub descr: String,
+ pub suggested_limit: Limit,
+ pub crate_name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_malformed_feature_attribute, code = "E0556")]
+pub(crate) struct MalformedFeatureAttribute {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub help: MalformedFeatureAttributeHelp,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MalformedFeatureAttributeHelp {
+ #[label(expected)]
+ Label {
+ #[primary_span]
+ span: Span,
+ },
+ #[suggestion(expected, code = "{suggestion}", applicability = "maybe-incorrect")]
+ Suggestion {
+ #[primary_span]
+ span: Span,
+ suggestion: Symbol,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_remove_expr_not_supported)]
+pub(crate) struct RemoveExprNotSupported {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum InvalidCfg {
+ #[diag(expand_invalid_cfg_no_parens)]
+ NotFollowedByParens {
+ #[primary_span]
+ #[suggestion(
+ expand_invalid_cfg_expected_syntax,
+ code = "cfg(/* predicate */)",
+ applicability = "has-placeholders"
+ )]
+ span: Span,
+ },
+ #[diag(expand_invalid_cfg_no_predicate)]
+ NoPredicate {
+ #[primary_span]
+ #[suggestion(
+ expand_invalid_cfg_expected_syntax,
+ code = "cfg(/* predicate */)",
+ applicability = "has-placeholders"
+ )]
+ span: Span,
+ },
+ #[diag(expand_invalid_cfg_multiple_predicates)]
+ MultiplePredicates {
+ #[primary_span]
+ span: Span,
+ },
+ #[diag(expand_invalid_cfg_predicate_literal)]
+ PredicateLiteral {
+ #[primary_span]
+ span: Span,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_wrong_fragment_kind)]
+pub(crate) struct WrongFragmentKind<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub kind: &'a str,
+ pub name: &'a ast::Path,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_unsupported_key_value)]
+pub(crate) struct UnsupportedKeyValue {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_incomplete_parse)]
+#[note]
+pub(crate) struct IncompleteParse<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub token: Cow<'a, str>,
+ #[label]
+ pub label_span: Span,
+ pub macro_path: &'a ast::Path,
+ pub kind_name: &'a str,
+
+ #[suggestion(
+ suggestion_add_semi,
+ style = "verbose",
+ code = ";",
+ applicability = "maybe-incorrect"
+ )]
+ pub add_semicolon: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_remove_node_not_supported)]
+pub(crate) struct RemoveNodeNotSupported {
+ #[primary_span]
+ pub span: Span,
+ pub descr: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_module_circular)]
+pub(crate) struct ModuleCircular {
+ #[primary_span]
+ pub span: Span,
+ pub modules: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_module_in_block)]
+pub(crate) struct ModuleInBlock {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub name: Option<ModuleInBlockName>,
+}
+
+#[derive(Subdiagnostic)]
+#[note(note)]
+pub(crate) struct ModuleInBlockName {
+ #[primary_span]
+ pub span: Span,
+ pub name: Ident,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_module_file_not_found, code = "E0583")]
+#[help]
+pub(crate) struct ModuleFileNotFound {
+ #[primary_span]
+ pub span: Span,
+ pub name: Ident,
+ pub default_path: String,
+ pub secondary_path: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(expand_module_multiple_candidates, code = "E0761")]
+#[help]
+pub(crate) struct ModuleMultipleCandidates {
+ #[primary_span]
+ pub span: Span,
+ pub name: Ident,
+ pub default_path: String,
+ pub secondary_path: String,
+}
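
Each `#[diag(...)]` slug above refers to a Fluent message that lives outside this diff (at this point in rustc's history, in the centralized locale resources). A sketch of what the corresponding entries look like, reconstructed from the hardcoded strings removed in base.rs; the exact file path and wording are assumptions, not shown by this patch:

    expand_macro_const_stability =
        macros cannot have const stability attributes
        .label = invalid const stability attribute
        .label2 = const stability attribute affects this macro

    expand_takes_no_arguments = {$name} takes no arguments
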
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 1014ec220..79d058d9c 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -1,5 +1,9 @@
use crate::base::*;
use crate::config::StripUnconfigured;
+use crate::errors::{
+ IncompleteParse, RecursionLimitReached, RemoveExprNotSupported, RemoveNodeNotSupported,
+ UnsupportedKeyValue, WrongFragmentKind,
+};
use crate::hygiene::SyntaxContext;
use crate::mbe::diagnostics::annotate_err_with_kind;
use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod};
@@ -18,7 +22,7 @@ use rustc_ast::{NestedMetaItem, NodeId, PatKind, StmtKind, TyKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::map_in_place::MapInPlace;
use rustc_data_structures::sync::Lrc;
-use rustc_errors::{Applicability, PResult};
+use rustc_errors::PResult;
use rustc_feature::Features;
use rustc_parse::parser::{
AttemptLocalParseRecovery, CommaRecoveryMode, ForceCollect, Parser, RecoverColon, RecoverComma,
@@ -140,12 +144,12 @@ macro_rules! ast_fragments {
}
pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
- match *self {
- AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr),
+ match self {
+ AstFragment::OptExpr(Some(expr)) => visitor.visit_expr(expr),
AstFragment::OptExpr(None) => {}
- AstFragment::MethodReceiverExpr(ref expr) => visitor.visit_method_receiver_expr(expr),
- $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)*
- $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] {
+ AstFragment::MethodReceiverExpr(expr) => visitor.visit_method_receiver_expr(expr),
+ $($(AstFragment::$Kind(ast) => visitor.$visit_ast(ast),)?)*
+ $($(AstFragment::$Kind(ast) => for ast_elt in &ast[..] {
visitor.$visit_ast_elt(ast_elt, $($args)*);
})?)*
}
@@ -583,12 +587,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
.resolver
.visit_ast_fragment_with_placeholders(self.cx.current_expansion.id, &fragment);
- if self.cx.sess.opts.unstable_opts.incremental_relative_spans {
+ if self.cx.sess.opts.incremental_relative_spans() {
for (invoc, _) in invocations.iter_mut() {
let expn_id = invoc.expansion_data.id;
let parent_def = self.cx.resolver.invocation_parent(expn_id);
let span = match &mut invoc.kind {
- InvocationKind::Bang { ref mut span, .. } => span,
+ InvocationKind::Bang { span, .. } => span,
InvocationKind::Attr { attr, .. } => &mut attr.span,
InvocationKind::Derive { path, .. } => &mut path.span,
};
@@ -606,29 +610,22 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
Limit(0) => Limit(2),
limit => limit * 2,
};
- self.cx
- .struct_span_err(
- expn_data.call_site,
- &format!("recursion limit reached while expanding `{}`", expn_data.kind.descr()),
- )
- .help(&format!(
- "consider increasing the recursion limit by adding a \
- `#![recursion_limit = \"{}\"]` attribute to your crate (`{}`)",
- suggested_limit, self.cx.ecfg.crate_name,
- ))
- .emit();
+
+ self.cx.emit_err(RecursionLimitReached {
+ span: expn_data.call_site,
+ descr: expn_data.kind.descr(),
+ suggested_limit,
+ crate_name: &self.cx.ecfg.crate_name,
+ });
+
self.cx.trace_macros_diag();
}
/// A macro's expansion does not fit in this fragment kind.
/// For example, a non-type macro in a type position.
fn error_wrong_fragment_kind(&mut self, kind: AstFragmentKind, mac: &ast::MacCall, span: Span) {
- let msg = format!(
- "non-{kind} macro in {kind} position: {path}",
- kind = kind.name(),
- path = pprust::path_to_string(&mac.path),
- );
- self.cx.span_err(span, &msg);
+ self.cx.emit_err(WrongFragmentKind { span, kind: kind.name(), name: &mac.path });
+
self.cx.trace_macros_diag();
}
@@ -707,7 +704,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
};
let attr_item = attr.unwrap_normal_item();
if let AttrArgs::Eq(..) = attr_item.args {
- self.cx.span_err(span, "key-value macro attributes are not supported");
+ self.cx.emit_err(UnsupportedKeyValue { span });
}
let inner_tokens = attr_item.args.inner_tokens();
let Ok(tok_result) = expander.expand(self.cx, span, inner_tokens, tokens) else {
@@ -729,9 +726,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
};
if fragment_kind == AstFragmentKind::Expr && items.is_empty() {
- let msg =
- "removing an expression is not supported in this position";
- self.cx.span_err(span, msg);
+ self.cx.emit_err(RemoveExprNotSupported { span });
fragment_kind.dummy(span)
} else {
fragment_kind.expect_from_annotatables(items)
@@ -939,38 +934,32 @@ pub fn parse_ast_fragment<'a>(
}
pub fn ensure_complete_parse<'a>(
- this: &mut Parser<'a>,
+ parser: &mut Parser<'a>,
macro_path: &ast::Path,
kind_name: &str,
span: Span,
) {
- if this.token != token::Eof {
- let token = pprust::token_to_string(&this.token);
- let msg = format!("macro expansion ignores token `{}` and any following", token);
+ if parser.token != token::Eof {
+ let token = pprust::token_to_string(&parser.token);
// Avoid emitting backtrace info twice.
- let def_site_span = this.token.span.with_ctxt(SyntaxContext::root());
- let mut err = this.struct_span_err(def_site_span, &msg);
- err.span_label(span, "caused by the macro expansion here");
- let msg = format!(
- "the usage of `{}!` is likely invalid in {} context",
- pprust::path_to_string(macro_path),
- kind_name,
- );
- err.note(&msg);
-
- let semi_span = this.sess.source_map().next_point(span);
- match this.sess.source_map().span_to_snippet(semi_span) {
- Ok(ref snippet) if &snippet[..] != ";" && kind_name == "expression" => {
- err.span_suggestion(
- span.shrink_to_hi(),
- "you might be missing a semicolon here",
- ";",
- Applicability::MaybeIncorrect,
- );
+ let def_site_span = parser.token.span.with_ctxt(SyntaxContext::root());
+
+ let semi_span = parser.sess.source_map().next_point(span);
+ let add_semicolon = match &parser.sess.source_map().span_to_snippet(semi_span) {
+ Ok(snippet) if &snippet[..] != ";" && kind_name == "expression" => {
+ Some(span.shrink_to_hi())
}
- _ => {}
- }
- err.emit();
+ _ => None,
+ };
+
+ parser.sess.emit_err(IncompleteParse {
+ span: def_site_span,
+ token,
+ label_span: span,
+ macro_path,
+ kind_name,
+ add_semicolon,
+ });
}
}
@@ -1766,9 +1755,8 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
if self.expand_cfg_true(node, attr, pos) {
continue;
}
- let msg =
- format!("removing {} is not supported in this position", Node::descr());
- self.cx.span_err(span, &msg);
+
+ self.cx.emit_err(RemoveNodeNotSupported { span, descr: Node::descr() });
continue;
}
sym::cfg_attr => {
diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs
index b34de94fb..897268566 100644
--- a/compiler/rustc_expand/src/lib.rs
+++ b/compiler/rustc_expand/src/lib.rs
@@ -10,6 +10,7 @@
#![feature(rustc_attrs)]
#![feature(try_blocks)]
#![recursion_limit = "256"]
+#![deny(rustc::untranslatable_diagnostic)]
#[macro_use]
extern crate rustc_macros;
@@ -31,8 +32,13 @@ pub mod config;
pub mod errors;
pub mod expand;
pub mod module;
+
+// FIXME(Nilstrieb) Translate proc_macro diagnostics
+#[allow(rustc::untranslatable_diagnostic)]
pub mod proc_macro;
+// FIXME(Nilstrieb) Translate macro_rules diagnostics
+#[allow(rustc::untranslatable_diagnostic)]
pub(crate) mod mbe;
// HACK(Centril, #64197): These shouldn't really be here.
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index 197f05691..f469b2dae 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -43,7 +43,7 @@ pub(super) fn failed_to_match_macro<'cx>(
return result;
}
- let Some((token, label, remaining_matcher)) = tracker.best_failure else {
+ let Some(BestFailure { token, msg: label, remaining_matcher, .. }) = tracker.best_failure else {
return DummyResult::any(sp);
};
@@ -95,12 +95,31 @@ struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
cx: &'a mut ExtCtxt<'cx>,
remaining_matcher: Option<&'matcher MatcherLoc>,
/// Which arm's failure should we report? (the one furthest along)
- best_failure: Option<(Token, &'static str, MatcherLoc)>,
+ best_failure: Option<BestFailure>,
root_span: Span,
result: Option<Box<dyn MacResult + 'cx>>,
}
+struct BestFailure {
+ token: Token,
+ position_in_tokenstream: usize,
+ msg: &'static str,
+ remaining_matcher: MatcherLoc,
+}
+
+impl BestFailure {
+ fn is_better_position(&self, position: usize) -> bool {
+ position > self.position_in_tokenstream
+ }
+}
+
impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
+ type Failure = (Token, usize, &'static str);
+
+ fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+ (tok, position, msg)
+ }
+
fn before_match_loc(&mut self, parser: &TtParser, matcher: &'matcher MatcherLoc) {
if self.remaining_matcher.is_none()
|| (parser.has_no_remaining_items_for_step() && *matcher != MatcherLoc::Eof)
@@ -109,7 +128,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
}
}
- fn after_arm(&mut self, result: &NamedParseResult) {
+ fn after_arm(&mut self, result: &NamedParseResult<Self::Failure>) {
match result {
Success(_) => {
// Nonterminal parser recovery might turn failed matches into successful ones,
@@ -119,18 +138,25 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
"should not collect detailed info for successful macro match",
);
}
- Failure(token, msg) => match self.best_failure {
- Some((ref best_token, _, _)) if best_token.span.lo() >= token.span.lo() => {}
- _ => {
- self.best_failure = Some((
- token.clone(),
+ Failure((token, approx_position, msg)) => {
+ debug!(?token, ?msg, "a new failure of an arm");
+
+ if self
+ .best_failure
+ .as_ref()
+ .map_or(true, |failure| failure.is_better_position(*approx_position))
+ {
+ self.best_failure = Some(BestFailure {
+ token: token.clone(),
+ position_in_tokenstream: *approx_position,
msg,
- self.remaining_matcher
+ remaining_matcher: self
+ .remaining_matcher
.expect("must have collected matcher already")
.clone(),
- ))
+ })
}
- },
+ }
Error(err_sp, msg) => {
let span = err_sp.substitute_dummy(self.root_span);
self.cx.struct_span_err(span, msg).emit();
@@ -155,6 +181,21 @@ impl<'a, 'cx> CollectTrackerAndEmitter<'a, 'cx, '_> {
}
}
+/// Currently used by macro_rules! compilation to extract a little information from the `Failure` case.
+pub struct FailureForwarder;
+
+impl<'matcher> Tracker<'matcher> for FailureForwarder {
+ type Failure = (Token, usize, &'static str);
+
+ fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+ (tok, position, msg)
+ }
+
+ fn description() -> &'static str {
+ "failure-forwarder"
+ }
+}
+
pub(super) fn emit_frag_parse_err(
mut e: DiagnosticBuilder<'_, rustc_errors::ErrorGuaranteed>,
parser: &Parser<'_>,
@@ -178,12 +219,12 @@ pub(super) fn emit_frag_parse_err(
);
if !e.span.is_dummy() {
// early end of macro arm (#52866)
- e.replace_span_with(parser.token.span.shrink_to_hi());
+ e.replace_span_with(parser.token.span.shrink_to_hi(), true);
}
}
if e.span.is_dummy() {
// Get around lack of span in error (#30128)
- e.replace_span_with(site_span);
+ e.replace_span_with(site_span, true);
if !parser.sess.source_map().is_imported(arm_span) {
e.span_label(arm_span, "in this macro arm");
}
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index 8994a2f78..5be134f4e 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -151,9 +151,9 @@ impl<'a, T> Iterator for &'a Stack<'a, T> {
// Iterates from top to bottom of the stack.
fn next(&mut self) -> Option<&'a T> {
- match *self {
+ match self {
Stack::Empty => None,
- Stack::Push { ref top, ref prev } => {
+ Stack::Push { top, prev } => {
*self = prev;
Some(top)
}
@@ -437,8 +437,8 @@ fn check_nested_occurrences(
// We check that the meta-variable is correctly used.
check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
}
- (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
- | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
+ (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(_, del))
+ | (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
if del.delim == Delimiter::Brace =>
{
let macro_rules = state == NestedMacroState::MacroRulesNotName;
@@ -468,7 +468,7 @@ fn check_nested_occurrences(
// We check that the meta-variable is correctly used.
check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
}
- (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
+ (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
if del.delim == Delimiter::Parenthesis =>
{
state = NestedMacroState::MacroNameParen;
@@ -483,7 +483,7 @@ fn check_nested_occurrences(
valid,
);
}
- (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
+ (NestedMacroState::MacroNameParen, TokenTree::Delimited(_, del))
if del.delim == Delimiter::Brace =>
{
state = NestedMacroState::Empty;
@@ -497,7 +497,7 @@ fn check_nested_occurrences(
valid,
);
}
- (_, ref tt) => {
+ (_, tt) => {
state = NestedMacroState::Empty;
check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
}
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index d161868ed..2e199541b 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -305,12 +305,13 @@ enum EofMatcherPositions {
}
/// Represents the possible results of an attempted parse.
-pub(crate) enum ParseResult<T> {
+pub(crate) enum ParseResult<T, F> {
/// Parsed successfully.
Success(T),
/// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
/// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
- Failure(Token, &'static str),
+ /// The usize is the approximate position of the token in the input token stream.
+ Failure(F),
/// Fatal error (malformed macro?). Abort compilation.
Error(rustc_span::Span, String),
ErrorReported(ErrorGuaranteed),
@@ -319,7 +320,7 @@ pub(crate) enum ParseResult<T> {
/// A `ParseResult` where the `Success` variant contains a mapping of
/// `MacroRulesNormalizedIdent`s to `NamedMatch`es. This represents the mapping
/// of metavars to the token trees they bind to.
-pub(crate) type NamedParseResult = ParseResult<NamedMatches>;
+pub(crate) type NamedParseResult<F> = ParseResult<NamedMatches, F>;
/// Contains a mapping of `MacroRulesNormalizedIdent`s to `NamedMatch`es.
/// This represents the mapping of metavars to the token trees they bind to.
@@ -455,8 +456,9 @@ impl TtParser {
&mut self,
matcher: &'matcher [MatcherLoc],
token: &Token,
+ approx_position: usize,
track: &mut T,
- ) -> Option<NamedParseResult> {
+ ) -> Option<NamedParseResult<T::Failure>> {
// Matcher positions that would be valid if the macro invocation was over now. Only
// modified if `token == Eof`.
let mut eof_mps = EofMatcherPositions::None;
@@ -593,13 +595,14 @@ impl TtParser {
EofMatcherPositions::Multiple => {
Error(token.span, "ambiguity: multiple successful parses".to_string())
}
- EofMatcherPositions::None => Failure(
+ EofMatcherPositions::None => Failure(T::build_failure(
Token::new(
token::Eof,
if token.span.is_dummy() { token.span } else { token.span.shrink_to_hi() },
),
+ approx_position,
"missing tokens in macro arguments",
- ),
+ )),
})
} else {
None
@@ -612,7 +615,7 @@ impl TtParser {
parser: &mut Cow<'_, Parser<'_>>,
matcher: &'matcher [MatcherLoc],
track: &mut T,
- ) -> NamedParseResult {
+ ) -> NamedParseResult<T::Failure> {
// A queue of possible matcher positions. We initialize it with the matcher position in
// which the "dot" is before the first token of the first token tree in `matcher`.
// `parse_tt_inner` then processes all of these possible matcher positions and produces
@@ -627,7 +630,12 @@ impl TtParser {
// Process `cur_mps` until either we have finished the input or we need to get some
// parsing from the black-box parser done.
- let res = self.parse_tt_inner(matcher, &parser.token, track);
+ let res = self.parse_tt_inner(
+ matcher,
+ &parser.token,
+ parser.approx_token_stream_pos(),
+ track,
+ );
if let Some(res) = res {
return res;
}
@@ -640,10 +648,11 @@ impl TtParser {
(0, 0) => {
// There are no possible next positions AND we aren't waiting for the black-box
// parser: syntax error.
- return Failure(
+ return Failure(T::build_failure(
parser.token.clone(),
+ parser.approx_token_stream_pos(),
"no rules expected this token in macro call",
- );
+ ));
}
(_, 0) => {
@@ -702,11 +711,11 @@ impl TtParser {
}
}
- fn ambiguity_error(
+ fn ambiguity_error<F>(
&self,
matcher: &[MatcherLoc],
token_span: rustc_span::Span,
- ) -> NamedParseResult {
+ ) -> NamedParseResult<F> {
let nts = self
.bb_mps
.iter()
@@ -732,11 +741,11 @@ impl TtParser {
)
}
- fn nameize<I: Iterator<Item = NamedMatch>>(
+ fn nameize<I: Iterator<Item = NamedMatch>, F>(
&self,
matcher: &[MatcherLoc],
mut res: I,
- ) -> NamedParseResult {
+ ) -> NamedParseResult<F> {
// Make that each metavar has _exactly one_ binding. If so, insert the binding into the
// `NamedParseResult`. Otherwise, it's an error.
let mut ret_val = FxHashMap::default();
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 2dbb90e21..4ebd75f01 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -141,31 +141,40 @@ fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span
}
pub(super) trait Tracker<'matcher> {
+ /// The contents of `ParseResult::Failure`.
+ type Failure;
+
+ /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
+ /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
+ /// The usize is the approximate position of the token in the input token stream.
+ fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure;
+
/// This is called before trying to match next MatcherLoc on the current token.
- fn before_match_loc(&mut self, parser: &TtParser, matcher: &'matcher MatcherLoc);
+ fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
/// This is called after an arm has been parsed, either successfully or unsuccessfully. When this is called,
/// `before_match_loc` was called at least once (with a `MatcherLoc::Eof`).
- fn after_arm(&mut self, result: &NamedParseResult);
+ fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {}
/// For tracing.
fn description() -> &'static str;
- fn recovery() -> Recovery;
+ fn recovery() -> Recovery {
+ Recovery::Forbidden
+ }
}
/// A noop tracker that is used in the hot path of the expansion, has zero overhead thanks to monomorphization.
pub(super) struct NoopTracker;
impl<'matcher> Tracker<'matcher> for NoopTracker {
- fn before_match_loc(&mut self, _: &TtParser, _: &'matcher MatcherLoc) {}
- fn after_arm(&mut self, _: &NamedParseResult) {}
+ type Failure = ();
+
+ fn build_failure(_tok: Token, _position: usize, _msg: &'static str) -> Self::Failure {}
+
fn description() -> &'static str {
"none"
}
- fn recovery() -> Recovery {
- Recovery::Forbidden
- }
}
/// Expands the rules based macro defined by `lhses` and `rhses` for a given
@@ -326,7 +335,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
return Ok((i, named_matches));
}
- Failure(_, _) => {
+ Failure(_) => {
trace!("Failed to match arm, trying the next one");
// Try the next arm.
}
@@ -381,11 +390,13 @@ pub fn compile_declarative_macro(
let rhs_nm = Ident::new(sym::rhs, def.span);
let tt_spec = Some(NonterminalKind::TT);
- // Parse the macro_rules! invocation
- let (macro_rules, body) = match &def.kind {
- ast::ItemKind::MacroDef(def) => (def.macro_rules, def.body.tokens.clone()),
+ let macro_def = match &def.kind {
+ ast::ItemKind::MacroDef(def) => def,
_ => unreachable!(),
};
+ let macro_rules = macro_def.macro_rules;
+
+ // Parse the macro_rules! invocation
// The pattern that macro_rules matches.
// The grammar for macro_rules! is:
@@ -426,13 +437,32 @@ pub fn compile_declarative_macro(
// Convert it into `MatcherLoc` form.
let argument_gram = mbe::macro_parser::compute_locs(&argument_gram);
- let parser = Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS);
+ let create_parser = || {
+ let body = macro_def.body.tokens.clone();
+ Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS)
+ };
+
+ let parser = create_parser();
let mut tt_parser =
TtParser::new(Ident::with_dummy_span(if macro_rules { kw::MacroRules } else { kw::Macro }));
let argument_map =
match tt_parser.parse_tt(&mut Cow::Owned(parser), &argument_gram, &mut NoopTracker) {
Success(m) => m,
- Failure(token, msg) => {
+ Failure(()) => {
+ // The fast `NoopTracker` doesn't have any info on failure, so we need to retry it with another one
+ // that gives us the information we need.
+ // For this we need to reclone the macro body as the previous parser consumed it.
+ let retry_parser = create_parser();
+
+ let parse_result = tt_parser.parse_tt(
+ &mut Cow::Owned(retry_parser),
+ &argument_gram,
+ &mut diagnostics::FailureForwarder,
+ );
+ let Failure((token, _, msg)) = parse_result else {
+ unreachable!("matcher returned something other than Failure after retry");
+ };
+
let s = parse_failure_msg(&token);
let sp = token.span.substitute_dummy(def.span);
let mut err = sess.parse_sess.span_diagnostic.struct_span_err(sp, &s);
@@ -456,11 +486,11 @@ pub fn compile_declarative_macro(
let mut valid = true;
// Extract the arguments:
- let lhses = match argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
- MatchedSeq(ref s) => s
+ let lhses = match &argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
+ MatchedSeq(s) => s
.iter()
.map(|m| {
- if let MatchedTokenTree(ref tt) = *m {
+ if let MatchedTokenTree(tt) = m {
let tt = mbe::quoted::parse(
TokenStream::new(vec![tt.clone()]),
true,
@@ -480,11 +510,11 @@ pub fn compile_declarative_macro(
_ => sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
};
- let rhses = match argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
- MatchedSeq(ref s) => s
+ let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
+ MatchedSeq(s) => s
.iter()
.map(|m| {
- if let MatchedTokenTree(ref tt) = *m {
+ if let MatchedTokenTree(tt) = m {
return mbe::quoted::parse(
TokenStream::new(vec![tt.clone()]),
false,
@@ -594,21 +624,21 @@ fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree)
fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
use mbe::TokenTree;
for tt in tts {
- match *tt {
+ match tt {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarExpr(..) => (),
- TokenTree::Delimited(_, ref del) => {
+ TokenTree::Delimited(_, del) => {
if !check_lhs_no_empty_seq(sess, &del.tts) {
return false;
}
}
- TokenTree::Sequence(span, ref seq) => {
+ TokenTree::Sequence(span, seq) => {
if seq.separator.is_none()
- && seq.tts.iter().all(|seq_tt| match *seq_tt {
+ && seq.tts.iter().all(|seq_tt| match seq_tt {
TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true,
- TokenTree::Sequence(_, ref sub_seq) => {
+ TokenTree::Sequence(_, sub_seq) => {
sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
|| sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
}
@@ -706,21 +736,21 @@ impl<'tt> FirstSets<'tt> {
fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
let mut first = TokenSet::empty();
for tt in tts.iter().rev() {
- match *tt {
+ match tt {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarExpr(..) => {
first.replace_with(TtHandle::TtRef(tt));
}
- TokenTree::Delimited(span, ref delimited) => {
+ TokenTree::Delimited(span, delimited) => {
build_recur(sets, &delimited.tts);
first.replace_with(TtHandle::from_token_kind(
token::OpenDelim(delimited.delim),
span.open,
));
}
- TokenTree::Sequence(sp, ref seq_rep) => {
+ TokenTree::Sequence(sp, seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts);
match sets.first.entry(sp.entire()) {
@@ -774,7 +804,7 @@ impl<'tt> FirstSets<'tt> {
let mut first = TokenSet::empty();
for tt in tts.iter() {
assert!(first.maybe_empty);
- match *tt {
+ match tt {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
@@ -782,17 +812,17 @@ impl<'tt> FirstSets<'tt> {
first.add_one(TtHandle::TtRef(tt));
return first;
}
- TokenTree::Delimited(span, ref delimited) => {
+ TokenTree::Delimited(span, delimited) => {
first.add_one(TtHandle::from_token_kind(
token::OpenDelim(delimited.delim),
span.open,
));
return first;
}
- TokenTree::Sequence(sp, ref seq_rep) => {
+ TokenTree::Sequence(sp, seq_rep) => {
let subfirst_owned;
let subfirst = match self.first.get(&sp.entire()) {
- Some(&Some(ref subfirst)) => subfirst,
+ Some(Some(subfirst)) => subfirst,
Some(&None) => {
subfirst_owned = self.first(&seq_rep.tts);
&subfirst_owned
@@ -1011,7 +1041,7 @@ fn check_matcher_core<'tt>(
// First, update `last` so that it corresponds to the set
// of NT tokens that might end the sequence `... token`.
- match *token {
+ match token {
TokenTree::Token(..)
| TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..)
@@ -1027,7 +1057,7 @@ fn check_matcher_core<'tt>(
suffix_first = build_suffix_first();
}
}
- TokenTree::Delimited(span, ref d) => {
+ TokenTree::Delimited(span, d) => {
let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
token::CloseDelim(d.delim),
span.close,
@@ -1040,7 +1070,7 @@ fn check_matcher_core<'tt>(
// against SUFFIX
continue 'each_token;
}
- TokenTree::Sequence(_, ref seq_rep) => {
+ TokenTree::Sequence(_, seq_rep) => {
suffix_first = build_suffix_first();
// The trick here: when we check the interior, we want
// to include the separator (if any) as a potential
@@ -1166,11 +1196,7 @@ fn check_matcher_core<'tt>(
err.note(&format!(
"{}{} or {}",
msg,
- ts[..ts.len() - 1]
- .iter()
- .copied()
- .collect::<Vec<_>>()
- .join(", "),
+ ts[..ts.len() - 1].to_vec().join(", "),
ts[ts.len() - 1],
));
}
@@ -1346,8 +1372,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
}
fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
- match *tt {
- mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token).into(),
+ match tt {
+ mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(),
mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
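
With the reworked `Tracker` trait above (associated `Failure` type and default bodies for `before_match_loc`, `after_arm`, and `recovery`), an implementation only has to say what a failure carries. A hypothetical minimal tracker, names invented for illustration and not part of the patch:

    // Counts parsed arms and carries nothing on failure, like NoopTracker.
    struct ArmCounter {
        arms_tried: usize,
    }

    impl<'matcher> Tracker<'matcher> for ArmCounter {
        type Failure = ();

        fn build_failure(_tok: Token, _position: usize, _msg: &'static str) -> Self::Failure {}

        fn after_arm(&mut self, _result: &NamedParseResult<Self::Failure>) {
            self.arms_tried += 1;
        }

        fn description() -> &'static str {
            "arm-counter"
        }
    }
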
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index ee17d54f6..bc298b0ad 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -171,7 +171,7 @@ fn parse_tree(
} else {
match delim {
Delimiter::Brace => {
- // The delimiter is `{`. This indicates the beginning
+ // The delimiter is `{`. This indicates the beginning
// of a meta-variable expression (e.g. `${count(ident)}`).
// Try to parse the meta-variable expression.
match MetaVarExpr::parse(&tts, delim_span.entire(), sess) {
@@ -200,7 +200,7 @@ fn parse_tree(
}
}
// If we didn't find a metavar expression above, then we must have a
- // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
+ // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
// contents of the sequence itself
let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
// Get the Kleene operator and optional separator
@@ -356,7 +356,7 @@ fn parse_sep_and_kleene_op(
// `$$` or a meta-variable is the lhs of a macro but shouldn't.
//
// For example, `macro_rules! foo { ( ${length()} ) => {} }`
-fn span_dollar_dollar_or_metavar_in_the_lhs_err<'sess>(sess: &'sess ParseSess, token: &Token) {
+fn span_dollar_dollar_or_metavar_in_the_lhs_err(sess: &ParseSess, token: &Token) {
sess.span_diagnostic
.span_err(token.span, &format!("unexpected token: {}", pprust::token_to_string(token)));
sess.span_diagnostic.span_note_without_error(
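
The last quoted.rs hunk drops a named lifetime that only appears on a single input reference, where elision already produces the same signature. A small illustration of that idea (the `Session` type and `report` functions below are hypothetical, not rustc APIs):

use std::cell::RefCell;

struct Session {
    errors: RefCell<Vec<String>>,
}

// Explicit lifetime that buys nothing: `'sess` is used on one parameter
// and never constrains the return type or another argument.
fn report_explicit<'sess>(sess: &'sess Session, msg: &str) {
    sess.errors.borrow_mut().push(msg.to_string());
}

// Elided form; the compiler infers an equivalent signature.
fn report(sess: &Session, msg: &str) {
    sess.errors.borrow_mut().push(msg.to_string());
}

fn main() {
    let sess = Session { errors: RefCell::new(Vec::new()) };
    report_explicit(&sess, "unexpected token: $");
    report(&sess, "unexpected token: $");
    assert_eq!(sess.errors.borrow().len(), 2);
}
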
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index bec6d1a2d..b79835be7 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -47,8 +47,7 @@ impl<'a> Iterator for Frame<'a> {
fn next(&mut self) -> Option<&'a mbe::TokenTree> {
match self {
- Frame::Delimited { tts, ref mut idx, .. }
- | Frame::Sequence { tts, ref mut idx, .. } => {
+ Frame::Delimited { tts, idx, .. } | Frame::Sequence { tts, idx, .. } => {
let res = tts.get(*idx);
*idx += 1;
res
@@ -220,13 +219,13 @@ pub(super) fn transcribe<'a>(
let ident = MacroRulesNormalizedIdent::new(original_ident);
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
match cur_matched {
- MatchedTokenTree(ref tt) => {
+ MatchedTokenTree(tt) => {
// `tt`s are emitted into the output stream directly as "raw tokens",
// without wrapping them into groups.
let token = tt.clone();
result.push(token);
}
- MatchedNonterminal(ref nt) => {
+ MatchedNonterminal(nt) => {
// Other variables are emitted into the output stream as groups with
// `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser.
@@ -299,12 +298,11 @@ fn lookup_cur_matched<'a>(
interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
- interpolations.get(&ident).map(|matched| {
- let mut matched = matched;
+ interpolations.get(&ident).map(|mut matched| {
for &(idx, _) in repeats {
match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => break,
- MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(),
+ MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
}
}
@@ -339,7 +337,7 @@ impl LockstepIterSize {
match self {
LockstepIterSize::Unconstrained => other,
LockstepIterSize::Contradiction(_) => self,
- LockstepIterSize::Constraint(l_len, ref l_id) => match other {
+ LockstepIterSize::Constraint(l_len, l_id) => match other {
LockstepIterSize::Unconstrained => self,
LockstepIterSize::Contradiction(_) => other,
LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
@@ -378,33 +376,33 @@ fn lockstep_iter_size(
repeats: &[(usize, usize)],
) -> LockstepIterSize {
use mbe::TokenTree;
- match *tree {
- TokenTree::Delimited(_, ref delimited) => {
+ match tree {
+ TokenTree::Delimited(_, delimited) => {
delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats))
})
}
- TokenTree::Sequence(_, ref seq) => {
+ TokenTree::Sequence(_, seq) => {
seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats))
})
}
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
- let name = MacroRulesNormalizedIdent::new(name);
+ let name = MacroRulesNormalizedIdent::new(*name);
match lookup_cur_matched(name, interpolations, repeats) {
Some(matched) => match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
- MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name),
+ MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
},
_ => LockstepIterSize::Unconstrained,
}
}
- TokenTree::MetaVarExpr(_, ref expr) => {
+ TokenTree::MetaVarExpr(_, expr) => {
let default_rslt = LockstepIterSize::Unconstrained;
let Some(ident) = expr.ident() else { return default_rslt; };
let name = MacroRulesNormalizedIdent::new(ident);
match lookup_cur_matched(name, interpolations, repeats) {
- Some(MatchedSeq(ref ads)) => {
+ Some(MatchedSeq(ads)) => {
default_rslt.with(LockstepIterSize::Constraint(ads.len(), name))
}
_ => default_rslt,
@@ -449,7 +447,7 @@ fn count_repetitions<'a>(
Some(_) => Err(out_of_bounds_err(cx, declared_lhs_depth, sp.entire(), "count")),
}
}
- MatchedSeq(ref named_matches) => {
+ MatchedSeq(named_matches) => {
let new_declared_lhs_depth = declared_lhs_depth + 1;
match depth_opt {
None => named_matches
@@ -472,7 +470,7 @@ fn count_repetitions<'a>(
// before we start counting. `matched` contains the various levels of the
// tree as we descend, and its final value is the subtree we are currently at.
for &(idx, _) in repeats {
- if let MatchedSeq(ref ads) = matched {
+ if let MatchedSeq(ads) = matched {
matched = &ads[idx];
}
}
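
In the `lookup_cur_matched` hunk, the closure now takes its parameter as `mut matched`, so the descent loop can re-point the reference without the extra `let mut matched = matched;` rebinding. A self-contained sketch of that pattern (the `Nested` enum is a stand-in, not rustc code):

enum Nested {
    Leaf(i32),
    Seq(Vec<Nested>),
}

fn leaf_at(root: Option<&Nested>, path: &[usize]) -> Option<i32> {
    root.map(|mut cur| {
        // `mut` on the closure parameter lets us re-point the reference as we
        // descend, with no separate rebinding inside the body.
        for &idx in path {
            if let Nested::Seq(children) = cur {
                cur = &children[idx];
            }
        }
        match cur {
            Nested::Leaf(v) => *v,
            Nested::Seq(_) => -1,
        }
    })
}

fn main() {
    let tree = Nested::Seq(vec![Nested::Leaf(7), Nested::Seq(vec![Nested::Leaf(9)])]);
    assert_eq!(leaf_at(Some(&tree), &[1, 0]), Some(9));
    assert_eq!(leaf_at(Some(&tree), &[0]), Some(7));
}
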
diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs
index 9002a24e4..07f47a9c3 100644
--- a/compiler/rustc_expand/src/module.rs
+++ b/compiler/rustc_expand/src/module.rs
@@ -1,13 +1,17 @@
use crate::base::ModuleData;
+use crate::errors::{
+ ModuleCircular, ModuleFileNotFound, ModuleInBlock, ModuleInBlockName, ModuleMultipleCandidates,
+};
use rustc_ast::ptr::P;
use rustc_ast::{token, AttrVec, Attribute, Inline, Item, ModSpans};
-use rustc_errors::{struct_span_err, DiagnosticBuilder, ErrorGuaranteed};
+use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_parse::new_parser_from_file;
use rustc_parse::validate_attr;
use rustc_session::parse::ParseSess;
use rustc_session::Session;
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
+use std::iter::once;
use std::path::{self, Path, PathBuf};
@@ -242,57 +246,41 @@ pub fn default_submod_path<'a>(
impl ModError<'_> {
fn report(self, sess: &Session, span: Span) -> ErrorGuaranteed {
- let diag = &sess.parse_sess.span_diagnostic;
match self {
ModError::CircularInclusion(file_paths) => {
- let mut msg = String::from("circular modules: ");
- for file_path in &file_paths {
- msg.push_str(&file_path.display().to_string());
- msg.push_str(" -> ");
- }
- msg.push_str(&file_paths[0].display().to_string());
- diag.struct_span_err(span, &msg)
- }
- ModError::ModInBlock(ident) => {
- let msg = "cannot declare a non-inline module inside a block unless it has a path attribute";
- let mut err = diag.struct_span_err(span, msg);
- if let Some(ident) = ident {
- let note =
- format!("maybe `use` the module `{}` instead of redeclaring it", ident);
- err.span_note(span, &note);
- }
- err
+ let path_to_string = |path: &PathBuf| path.display().to_string();
+
+ let paths = file_paths
+ .iter()
+ .map(path_to_string)
+ .chain(once(path_to_string(&file_paths[0])))
+ .collect::<Vec<_>>();
+
+ let modules = paths.join(" -> ");
+
+ sess.emit_err(ModuleCircular { span, modules })
}
- ModError::FileNotFound(ident, default_path, secondary_path) => {
- let mut err = struct_span_err!(
- diag,
+ ModError::ModInBlock(ident) => sess.emit_err(ModuleInBlock {
+ span,
+ name: ident.map(|name| ModuleInBlockName { span, name }),
+ }),
+ ModError::FileNotFound(name, default_path, secondary_path) => {
+ sess.emit_err(ModuleFileNotFound {
span,
- E0583,
- "file not found for module `{}`",
- ident,
- );
- err.help(&format!(
- "to create the module `{}`, create file \"{}\" or \"{}\"",
- ident,
- default_path.display(),
- secondary_path.display(),
- ));
- err
+ name,
+ default_path: default_path.display().to_string(),
+ secondary_path: secondary_path.display().to_string(),
+ })
}
- ModError::MultipleCandidates(ident, default_path, secondary_path) => {
- let mut err = struct_span_err!(
- diag,
+ ModError::MultipleCandidates(name, default_path, secondary_path) => {
+ sess.emit_err(ModuleMultipleCandidates {
span,
- E0761,
- "file for module `{}` found at both \"{}\" and \"{}\"",
- ident,
- default_path.display(),
- secondary_path.display(),
- );
- err.help("delete or rename one of them to remove the ambiguity");
- err
+ name,
+ default_path: default_path.display().to_string(),
+ secondary_path: secondary_path.display().to_string(),
+ })
}
- ModError::ParserError(err) => err,
- }.emit()
+ ModError::ParserError(mut err) => err.emit(),
+ }
}
}
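
The module.rs rewrite above swaps hand-built `struct_span_err!` messages for typed error values passed to `sess.emit_err(...)`, so the call site only supplies data. A simplified, self-contained sketch of that shape (plain stand-ins, not the rustc diagnostics machinery), including the same chain-the-first-path-again trick used to render the circular-module cycle:

use std::iter::once;
use std::path::PathBuf;

// Typed "diagnostic": the call site fills in data, rendering lives in one place.
struct ModuleCircular {
    modules: String,
}

fn emit_err(err: ModuleCircular) {
    eprintln!("error: circular modules: {}", err.modules);
}

fn main() {
    let file_paths = vec![PathBuf::from("a.rs"), PathBuf::from("b.rs")];

    let path_to_string = |path: &PathBuf| path.display().to_string();

    // Append the first path again so the rendered chain shows the cycle closing.
    let modules = file_paths
        .iter()
        .map(path_to_string)
        .chain(once(path_to_string(&file_paths[0])))
        .collect::<Vec<_>>()
        .join(" -> ");

    emit_err(ModuleCircular { modules });
}
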
diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index e49f112bf..0726d922c 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -176,9 +176,9 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
}
impl<'a> visit::Visitor<'a> for PatIdentVisitor {
fn visit_pat(&mut self, p: &'a ast::Pat) {
- match p.kind {
- PatKind::Ident(_, ref ident, _) => {
- self.spans.push(ident.span.clone());
+ match &p.kind {
+ PatKind::Ident(_, ident, _) => {
+ self.spans.push(ident.span);
}
_ => {
visit::walk_pat(self, p);
@@ -290,10 +290,8 @@ fn ttdelim_span() {
)
.unwrap();
- let tts: Vec<_> = match expr.kind {
- ast::ExprKind::MacCall(ref mac) => mac.args.tokens.clone().into_trees().collect(),
- _ => panic!("not a macro"),
- };
+ let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
+ let tts: Vec<_> = mac.args.tokens.clone().into_trees().collect();
let span = tts.iter().rev().next().unwrap().span();
@@ -318,11 +316,8 @@ fn out_of_line_mod() {
.unwrap()
.unwrap();
- if let ast::ItemKind::Mod(_, ref mod_kind) = item.kind {
- assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
- } else {
- panic!();
- }
+ let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() };
+ assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
});
}
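
Both test rewrites above use `let ... else` to replace a `match` whose only other arm panics. A minimal standalone example of the same pattern (the `Expr` enum is illustrative):

#[allow(dead_code)]
enum Expr {
    MacCall(Vec<String>),
    Lit(i32),
}

fn mac_tokens(expr: &Expr) -> Vec<String> {
    // Binds on the expected variant and diverges otherwise, replacing
    // `match expr { Expr::MacCall(t) => t.clone(), _ => panic!("not a macro") }`.
    let Expr::MacCall(tokens) = expr else { panic!("not a macro") };
    tokens.clone()
}

fn main() {
    let e = Expr::MacCall(vec!["a".into(), "b".into()]);
    assert_eq!(mac_tokens(&e), ["a".to_string(), "b".to_string()]);
}
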
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 761657961..341ae1854 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -6,6 +6,7 @@ use pm::{Delimiter, Level, LineColumn};
use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::{self, Spacing::*, TokenStream};
+use rustc_ast::util::literal::escape_byte_str_symbol;
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
@@ -229,7 +230,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
let stream = TokenStream::from_nonterminal_ast(&nt);
// A hack used to pass AST fragments to attribute and derive
// macros as a single nonterminal token instead of a token
- // stream.  Such token needs to be "unwrapped" and not
+ // stream. Such token needs to be "unwrapped" and not
// represented as a delimited group.
// FIXME: It needs to be removed, but there are some
// compatibility issues (see #73345).
@@ -526,7 +527,7 @@ impl server::TokenStream for Rustc<'_, '_> {
Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span))
}
ast::ExprKind::IncludedBytes(bytes) => {
- let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
+ let lit = token::Lit::new(token::ByteStr, escape_byte_str_symbol(bytes), None);
Ok(tokenstream::TokenStream::token_alone(token::TokenKind::Literal(lit), expr.span))
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
@@ -596,8 +597,8 @@ impl server::SourceFile for Rustc<'_, '_> {
}
fn path(&mut self, file: &Self::SourceFile) -> String {
- match file.name {
- FileName::Real(ref name) => name
+ match &file.name {
+ FileName::Real(name) => name
.local_path()
.expect("attempting to get a file path in an imported file in `proc_macro::SourceFile::path`")
.to_str()
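
The `IncludedBytes` arm now builds the byte-string token from an escaped rendering of the raw bytes (via `escape_byte_str_symbol` in rustc_ast) instead of round-tripping through `LitKind::ByteStr`. The sketch below only illustrates the escaping idea and is not the real implementation; rustc's exact escape rules differ:

// Render raw bytes the way a byte-string literal body is written:
// printable ASCII stays as-is, everything else becomes a \xNN escape.
fn escape_byte_str(bytes: &[u8]) -> String {
    bytes
        .iter()
        .map(|&b| match b {
            b'\\' => String::from("\\\\"),
            b'"' => String::from("\\\""),
            0x20..=0x7e => (b as char).to_string(),
            _ => format!("\\x{:02x}", b),
        })
        .collect()
}

fn main() {
    assert_eq!(escape_byte_str(b"ok"), "ok");
    assert_eq!(escape_byte_str(&[0x00, 0xff]), "\\x00\\xff");
    // A token literal would then carry this escaped body, e.g. b"hi\x01".
    println!("b\"{}\"", escape_byte_str(b"hi\x01"));
}
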
diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs
index 539b04535..8f3bea29f 100644
--- a/compiler/rustc_expand/src/tests.rs
+++ b/compiler/rustc_expand/src/tests.rs
@@ -154,6 +154,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
false,
);
let handler = Handler::with_emitter(true, None, Box::new(emitter));
+ #[allow(rustc::untranslatable_diagnostic)]
handler.span_err(msp, "foo");
assert!(