author     Daniel Baumann <daniel.baumann@progress-linux.org>    2024-06-07 05:48:48 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>    2024-06-07 05:48:48 +0000
commit     ef24de24a82fe681581cc130f342363c47c0969a (patch)
tree       0d494f7e1a38b95c92426f58fe6eaa877303a86c /compiler/rustc_parse/src/parser
parent     Releasing progress-linux version 1.74.1+dfsg1-1~progress7.99u1. (diff)
download   rustc-ef24de24a82fe681581cc130f342363c47c0969a.tar.xz
           rustc-ef24de24a82fe681581cc130f342363c47c0969a.zip

Merging upstream version 1.75.0+dfsg1.

Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_parse/src/parser')
 -rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs | 211
 -rw-r--r--  compiler/rustc_parse/src/parser/expr.rs        | 239
 -rw-r--r--  compiler/rustc_parse/src/parser/item.rs        | 106
 -rw-r--r--  compiler/rustc_parse/src/parser/mod.rs         |  35
 -rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs |  37
 -rw-r--r--  compiler/rustc_parse/src/parser/pat.rs         |  37
 -rw-r--r--  compiler/rustc_parse/src/parser/path.rs        |  38
 -rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs        |  60
 -rw-r--r--  compiler/rustc_parse/src/parser/ty.rs          |  18
 9 files changed, 552 insertions(+), 229 deletions(-)
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 06b1b1523..2a8eb6edd 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -39,7 +39,7 @@ use rustc_errors::{
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident};
-use rustc_span::{Span, SpanSnippetError, Symbol, DUMMY_SP};
+use rustc_span::{BytePos, Span, SpanSnippetError, Symbol, DUMMY_SP};
use std::mem::take;
use std::ops::{Deref, DerefMut};
use thin_vec::{thin_vec, ThinVec};
@@ -314,11 +314,10 @@ impl<'a> Parser<'a> {
// which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
let ident_name = ident.name.to_string();
- Some(SuggEscapeIdentifier {
- span: ident.span.shrink_to_lo(),
- ident_name
- })
- } else { None };
+ Some(SuggEscapeIdentifier { span: ident.span.shrink_to_lo(), ident_name })
+ } else {
+ None
+ };
let suggest_remove_comma =
if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
@@ -375,9 +374,11 @@ impl<'a> Parser<'a> {
// and current token should be Ident with the item name (i.e. the function name)
// if there is a `<` after the fn name, then don't show a suggestion, show help
- if !self.look_ahead(1, |t| *t == token::Lt) &&
- let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) {
- err.multipart_suggestion_verbose(
+ if !self.look_ahead(1, |t| *t == token::Lt)
+ && let Ok(snippet) =
+ self.sess.source_map().span_to_snippet(generic.span)
+ {
+ err.multipart_suggestion_verbose(
format!("place the generic parameter name after the {ident_name} name"),
vec![
(self.token.span.shrink_to_hi(), snippet),
@@ -385,11 +386,11 @@ impl<'a> Parser<'a> {
],
Applicability::MaybeIncorrect,
);
- } else {
- err.help(format!(
- "place the generic parameter name after the {ident_name} name"
- ));
- }
+ } else {
+ err.help(format!(
+ "place the generic parameter name after the {ident_name} name"
+ ));
+ }
}
}
Err(err) => {
@@ -402,7 +403,9 @@ impl<'a> Parser<'a> {
}
}
- if let Some(recovered_ident) = recovered_ident && recover {
+ if let Some(recovered_ident) = recovered_ident
+ && recover
+ {
err.emit();
Ok(recovered_ident)
} else {
@@ -501,8 +504,10 @@ impl<'a> Parser<'a> {
// Special-case "expected `;`" errors
if expected.contains(&TokenType::Token(token::Semi)) {
- if self.prev_token == token::Question && self.maybe_recover_from_ternary_operator() {
- return Ok(true);
+ // If the user is trying to write a ternary expression, recover it and
+ // return an Err to prevent a cascade of irrelevant diagnostics
+ if self.prev_token == token::Question && let Err(e) = self.maybe_recover_from_ternary_operator() {
+ return Err(e);
}
if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
@@ -617,19 +622,19 @@ impl<'a> Parser<'a> {
}
if let TokenKind::Ident(prev, _) = &self.prev_token.kind
- && let TokenKind::Ident(cur, _) = &self.token.kind
+ && let TokenKind::Ident(cur, _) = &self.token.kind
{
- let concat = Symbol::intern(&format!("{prev}{cur}"));
- let ident = Ident::new(concat, DUMMY_SP);
- if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
- let span = self.prev_token.span.to(self.token.span);
- err.span_suggestion_verbose(
- span,
- format!("consider removing the space to spell keyword `{concat}`"),
- concat,
- Applicability::MachineApplicable,
- );
- }
+ let concat = Symbol::intern(&format!("{prev}{cur}"));
+ let ident = Ident::new(concat, DUMMY_SP);
+ if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
+ let span = self.prev_token.span.to(self.token.span);
+ err.span_suggestion_verbose(
+ span,
+ format!("consider removing the space to spell keyword `{concat}`"),
+ concat,
+ Applicability::MachineApplicable,
+ );
+ }
}
// `pub` may be used for an item or `pub(crate)`
@@ -645,6 +650,26 @@ impl<'a> Parser<'a> {
);
}
+ if let token::DocComment(kind, style, _) = self.token.kind {
+ // We have something like `expr //!val` where the user likely meant `expr // !val`
+ let pos = self.token.span.lo() + BytePos(2);
+ let span = self.token.span.with_lo(pos).with_hi(pos);
+ err.span_suggestion_verbose(
+ span,
+ format!(
+ "add a space before {} to write a regular comment",
+ match (kind, style) {
+ (token::CommentKind::Line, ast::AttrStyle::Inner) => "`!`",
+ (token::CommentKind::Block, ast::AttrStyle::Inner) => "`!`",
+ (token::CommentKind::Line, ast::AttrStyle::Outer) => "the last `/`",
+ (token::CommentKind::Block, ast::AttrStyle::Outer) => "the last `*`",
+ },
+ ),
+ " ".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+
// Add suggestion for a missing closing angle bracket if '>' is included in expected_tokens
// there are unclosed angle brackets
if self.unmatched_angle_bracket_count > 0
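A minimal compiling sketch of the situation this new suggestion targets, following the hunk's own `expr //!val` example; the variable names are illustrative only. A doc-comment token appears where the parser expected `;`, and the suggested space turns it into a regular comment:

    // Rejected: `//!end` lexes as an inner doc comment after the expression,
    //     let total = 1 + 2 //!end
    // With the suggested space (and the `;` the parser was expecting) it is
    // an ordinary line comment:
    fn main() {
        let total = 1 + 2; // !end
        assert_eq!(total, 3);
    }
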
@@ -827,6 +852,65 @@ impl<'a> Parser<'a> {
None
}
+ pub(super) fn recover_closure_body(
+ &mut self,
+ mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
+ before: token::Token,
+ prev: token::Token,
+ token: token::Token,
+ lo: Span,
+ decl_hi: Span,
+ ) -> PResult<'a, P<Expr>> {
+ err.span_label(lo.to(decl_hi), "while parsing the body of this closure");
+ match before.kind {
+ token::OpenDelim(Delimiter::Brace)
+ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
+ {
+ // `{ || () }` should have been `|| { () }`
+ err.multipart_suggestion(
+ "you might have meant to open the body of the closure, instead of enclosing \
+ the closure in a block",
+ vec![
+ (before.span, String::new()),
+ (prev.span.shrink_to_hi(), " {".to_string()),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ err.emit();
+ self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+ }
+ token::OpenDelim(Delimiter::Parenthesis)
+ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
+ {
+ // We are within a function call or tuple, we can emit the error
+ // and recover.
+ self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis), &token::Comma]);
+
+ err.multipart_suggestion_verbose(
+ "you might have meant to open the body of the closure",
+ vec![
+ (prev.span.shrink_to_hi(), " {".to_string()),
+ (self.token.span.shrink_to_lo(), "}".to_string()),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ err.emit();
+ }
+ _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => {
+ // We don't have a heuristic to correctly identify where the block
+ // should be closed.
+ err.multipart_suggestion_verbose(
+ "you might have meant to open the body of the closure",
+ vec![(prev.span.shrink_to_hi(), " {".to_string())],
+ Applicability::HasPlaceholders,
+ );
+ return Err(err);
+ }
+ _ => return Err(err),
+ }
+ Ok(self.mk_expr_err(lo.to(self.token.span)))
+ }
+
/// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
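The new `recover_closure_body` fires when a closure body fails to parse; per its own comments, shapes like `{ || () }` or an un-opened body inside call parentheses get a suggestion to open the body right after the closure head. A compiling sketch of the suggested shape, with illustrative names:

    fn main() {
        // Suggested shape: the `{` opens the body after the `||` head.
        let f = || { () };
        f();
    }
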
@@ -1025,8 +1109,7 @@ impl<'a> Parser<'a> {
.emit();
match self.parse_expr() {
Ok(_) => {
- *expr =
- self.mk_expr_err(expr.span.to(self.prev_token.span));
+ *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
return Ok(());
}
Err(err) => {
@@ -1218,7 +1301,9 @@ impl<'a> Parser<'a> {
return if token::ModSep == self.token.kind {
// We have some certainty that this was a bad turbofish at this point.
// `foo< bar >::`
- if let ExprKind::Binary(o, ..) = inner_op.kind && o.node == BinOpKind::Lt {
+ if let ExprKind::Binary(o, ..) = inner_op.kind
+ && o.node == BinOpKind::Lt
+ {
err.suggest_turbofish = Some(op.span.shrink_to_lo());
} else {
err.help_turbofish = Some(());
@@ -1248,7 +1333,9 @@ impl<'a> Parser<'a> {
} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
// We have high certainty that this was a bad turbofish at this point.
// `foo< bar >(`
- if let ExprKind::Binary(o, ..) = inner_op.kind && o.node == BinOpKind::Lt {
+ if let ExprKind::Binary(o, ..) = inner_op.kind
+ && o.node == BinOpKind::Lt
+ {
err.suggest_turbofish = Some(op.span.shrink_to_lo());
} else {
err.help_turbofish = Some(());
@@ -1343,10 +1430,10 @@ impl<'a> Parser<'a> {
/// Rust has no ternary operator (`cond ? then : else`). Parse it and try
/// to recover from it if `then` and `else` are valid expressions. Returns
- /// whether it was a ternary operator.
- pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> bool {
+ /// an err if this appears to be a ternary expression.
+ pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> PResult<'a, ()> {
if self.prev_token != token::Question {
- return false;
+ return PResult::Ok(());
}
let lo = self.prev_token.span.lo();
@@ -1364,20 +1451,18 @@ impl<'a> Parser<'a> {
if self.eat_noexpect(&token::Colon) {
match self.parse_expr() {
Ok(_) => {
- self.sess.emit_err(TernaryOperator { span: self.token.span.with_lo(lo) });
- return true;
+ return Err(self
+ .sess
+ .create_err(TernaryOperator { span: self.token.span.with_lo(lo) }));
}
Err(err) => {
err.cancel();
- self.restore_snapshot(snapshot);
}
};
}
- } else {
- self.restore_snapshot(snapshot);
- };
-
- false
+ }
+ self.restore_snapshot(snapshot);
+ Ok(())
}
pub(super) fn maybe_recover_from_bad_type_plus(&mut self, ty: &Ty) -> PResult<'a, ()> {
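As the doc comment above says, Rust has no `cond ? then : else` operator; the recovery now returns the `TernaryOperator` error instead of a bool so the caller can stop before emitting a cascade of follow-up errors. The idiomatic replacement is an `if` expression (names below are illustrative):

    fn pick(cond: bool, a: i32, b: i32) -> i32 {
        // rejected: cond ? a : b
        if cond { a } else { b }
    }

    fn main() {
        assert_eq!(pick(true, 1, 2), 1);
    }
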
@@ -1826,19 +1911,21 @@ impl<'a> Parser<'a> {
let sm = self.sess.source_map();
let left = begin_par_sp;
let right = self.prev_token.span;
- let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left) &&
- !snip.ends_with(' ') {
- " ".to_string()
- } else {
- "".to_string()
- };
+ let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left)
+ && !snip.ends_with(' ')
+ {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
- let right_snippet = if let Ok(snip) = sm.span_to_next_source(right) &&
- !snip.starts_with(' ') {
- " ".to_string()
- } else {
- "".to_string()
- };
+ let right_snippet = if let Ok(snip) = sm.span_to_next_source(right)
+ && !snip.starts_with(' ')
+ {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
self.sess.emit_err(ParenthesesInForHead {
span: vec![left, right],
@@ -2721,8 +2808,15 @@ impl<'a> Parser<'a> {
}
pub fn recover_diff_marker(&mut self) {
+ if let Err(mut err) = self.err_diff_marker() {
+ err.emit();
+ FatalError.raise();
+ }
+ }
+
+ pub fn err_diff_marker(&mut self) -> PResult<'a, ()> {
let Some(start) = self.diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else {
- return;
+ return Ok(());
};
let mut spans = Vec::with_capacity(3);
spans.push(start);
@@ -2769,8 +2863,7 @@ impl<'a> Parser<'a> {
"for an explanation on these markers from the `git` documentation, visit \
<https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
);
- err.emit();
- FatalError.raise()
+ Err(err)
}
/// Parse and throw away a parenthesized comma separated
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index f4cee3a66..235b28b6e 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -9,7 +9,7 @@ use super::{
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
-use ast::{Path, PathSegment};
+use ast::{GenBlockKind, Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
@@ -32,10 +32,10 @@ use rustc_macros::Subdiagnostic;
use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
-use rustc_span::source_map::{self, Span, Spanned};
+use rustc_span::source_map::{self, Spanned};
use rustc_span::symbol::kw::PathRoot;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{BytePos, Pos};
+use rustc_span::{BytePos, Pos, Span};
use thin_vec::{thin_vec, ThinVec};
/// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
@@ -1007,8 +1007,9 @@ impl<'a> Parser<'a> {
let span = self.token.span;
let sm = self.sess.source_map();
let (span, actual) = match (&self.token.kind, self.subparser_name) {
- (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) =>
- (span.shrink_to_hi(), actual.into()),
+ (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => {
+ (span.shrink_to_hi(), actual.into())
+ }
_ => (span, actual),
};
self.sess.emit_err(errors::UnexpectedTokenAfterDot { span, actual });
@@ -1440,14 +1441,20 @@ impl<'a> Parser<'a> {
} else if this.token.uninterpolated_span().at_least_rust_2018() {
// `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
if this.check_keyword(kw::Async) {
- if this.is_async_block() {
+ if this.is_gen_block(kw::Async) {
// Check for `async {` and `async move {`.
- this.parse_async_block()
+ this.parse_gen_block()
} else {
this.parse_expr_closure()
}
} else if this.eat_keyword(kw::Await) {
this.recover_incorrect_await_syntax(lo, this.prev_token.span)
+ } else if this.token.uninterpolated_span().at_least_rust_2024() {
+ if this.is_gen_block(kw::Gen) {
+ this.parse_gen_block()
+ } else {
+ this.parse_expr_lit()
+ }
} else {
this.parse_expr_lit()
}
@@ -1550,10 +1557,7 @@ impl<'a> Parser<'a> {
self.sess.emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
}
let lo = path.span;
- let mac = P(MacCall {
- path,
- args: self.parse_delim_args()?,
- });
+ let mac = P(MacCall { path, args: self.parse_delim_args()? });
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(Delimiter::Brace))
&& let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
@@ -1592,7 +1596,7 @@ impl<'a> Parser<'a> {
} else if !ate_colon
&& self.may_recover()
&& (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
- || self.token.is_op())
+ || self.token.is_punct())
{
let (lit, _) =
self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| {
@@ -1771,7 +1775,9 @@ impl<'a> Parser<'a> {
fn parse_expr_break(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.prev_token.span;
let mut label = self.eat_label();
- let kind = if self.token == token::Colon && let Some(label) = label.take() {
+ let kind = if self.token == token::Colon
+ && let Some(label) = label.take()
+ {
// The value expression can be a labeled loop, see issue #86948, e.g.:
// `loop { break 'label: loop { break 'label 42; }; }`
let lexpr = self.parse_expr_labeled(label, true)?;
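The hunk's comment cites issue #86948: the value of `break` can itself be a labeled loop, which is why the label is taken off the `break` when a `:` follows. The comment's own example, wrapped in a `main` (expected to compile as-is):

    fn main() {
        // The label belongs to the value expression, not to `break` itself.
        let x = loop { break 'label: loop { break 'label 42; }; };
        assert_eq!(x, 42);
    }
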
@@ -1848,7 +1854,7 @@ impl<'a> Parser<'a> {
let lo = self.prev_token.span;
let kind = ExprKind::Yield(self.parse_expr_opt()?);
let span = lo.to(self.prev_token.span);
- self.sess.gated_spans.gate(sym::generators, span);
+ self.sess.gated_spans.gate(sym::yield_expr, span);
let expr = self.mk_expr(span, kind);
self.maybe_recover_from_bad_qpath(expr)
}
@@ -2209,6 +2215,7 @@ impl<'a> Parser<'a> {
fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
+ let before = self.prev_token.clone();
let binder = if self.check_keyword(kw::For) {
let lo = self.token.span;
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
@@ -2239,7 +2246,12 @@ impl<'a> Parser<'a> {
FnRetTy::Default(_) => {
let restrictions =
self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
- self.parse_expr_res(restrictions, None)?
+ let prev = self.prev_token.clone();
+ let token = self.token.clone();
+ match self.parse_expr_res(restrictions, None) {
+ Ok(expr) => expr,
+ Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
+ }
}
_ => {
// If an explicit return type is given, require a block to appear (RFC 968).
@@ -2291,13 +2303,14 @@ impl<'a> Parser<'a> {
/// Parses an optional `move` prefix to a closure-like construct.
fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
if self.eat_keyword(kw::Move) {
+ let move_kw_span = self.prev_token.span;
// Check for `move async` and recover
if self.check_keyword(kw::Async) {
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
.into_diagnostic(&self.sess.span_diagnostic))
} else {
- Ok(CaptureBy::Value)
+ Ok(CaptureBy::Value { move_kw: move_kw_span })
}
} else {
Ok(CaptureBy::Ref)
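`CaptureBy::Value` now carries the span of the `move` keyword itself (`move_kw`), which later diagnostics can point at; the surface syntax is unchanged. A trivial illustration:

    fn main() {
        let s = String::from("hi");
        // `move` forces the closure to take ownership of `s`.
        let len = move || s.len();
        assert_eq!(len(), 2);
    }
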
@@ -2371,16 +2384,18 @@ impl<'a> Parser<'a> {
let mut recover_block_from_condition = |this: &mut Self| {
let block = match &mut cond.kind {
ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
- if let ExprKind::Block(_, None) = right.kind => {
- self.sess.emit_err(errors::IfExpressionMissingThenBlock {
- if_span: lo,
- missing_then_block_sub:
- errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(cond_span.shrink_to_lo().to(*binop_span)),
- let_else_sub: None,
-
- });
- std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
- },
+ if let ExprKind::Block(_, None) = right.kind =>
+ {
+ self.sess.emit_err(errors::IfExpressionMissingThenBlock {
+ if_span: lo,
+ missing_then_block_sub:
+ errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(
+ cond_span.shrink_to_lo().to(*binop_span),
+ ),
+ let_else_sub: None,
+ });
+ std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
+ }
ExprKind::Block(_, None) => {
self.sess.emit_err(errors::IfExpressionMissingCondition {
if_span: lo.shrink_to_hi(),
@@ -2426,10 +2441,26 @@ impl<'a> Parser<'a> {
self.error_on_extra_if(&cond)?;
// Parse block, which will always fail, but we can add a nice note to the error
self.parse_block().map_err(|mut err| {
- err.span_note(
- cond_span,
- "the `if` expression is missing a block after this condition",
- );
+ if self.prev_token == token::Semi
+ && self.token == token::AndAnd
+ && let maybe_let = self.look_ahead(1, |t| t.clone())
+ && maybe_let.is_keyword(kw::Let)
+ {
+ err.span_suggestion(
+ self.prev_token.span,
+ "consider removing this semicolon to parse the `let` as part of the same chain",
+ "",
+ Applicability::MachineApplicable,
+ ).span_note(
+ self.token.span.to(maybe_let.span),
+ "you likely meant to continue parsing the let-chain starting here",
+ );
+ } else {
+ err.span_note(
+ cond_span,
+ "the `if` expression is missing a block after this condition",
+ );
+ }
err
})?
}
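The new branch suggests removing a stray `;` that splits an intended let-chain (detected via the `&& let` lookahead above). A sketch of the chained condition it steers toward, assuming a toolchain/edition where `if`-let-chains are accepted:

    fn main() {
        let opt = Some(3);
        // With the stray `;` removed, the whole condition parses as one chain.
        if opt.is_some() && let Some(x) = opt {
            println!("{x}");
        }
    }
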
@@ -2459,10 +2490,16 @@ impl<'a> Parser<'a> {
/// Parses a `let $pat = $expr` pseudo-expression.
fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
let is_recovered = if !restrictions.contains(Restrictions::ALLOW_LET) {
- Some(self.sess.emit_err(errors::ExpectedExpressionFoundLet {
+ let err = errors::ExpectedExpressionFoundLet {
span: self.token.span,
reason: ForbiddenLetReason::OtherForbidden,
- }))
+ };
+ if self.prev_token.kind == token::BinOp(token::Or) {
+ // This was part of a closure; let that part of the parser recover.
+ return Err(err.into_diagnostic(&self.sess.span_diagnostic));
+ } else {
+ Some(self.sess.emit_err(err))
+ }
} else {
None
};
@@ -2557,13 +2594,16 @@ impl<'a> Parser<'a> {
}
fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
- if let ExprKind::Binary(Spanned { span: binop_span, node: binop}, _, right) = &cond.kind &&
- let BinOpKind::And = binop &&
- let ExprKind::If(cond, ..) = &right.kind {
- Err(self.sess.create_err(errors::UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()))))
- } else {
- Ok(())
- }
+ if let ExprKind::Binary(Spanned { span: binop_span, node: binop }, _, right) = &cond.kind
+ && let BinOpKind::And = binop
+ && let ExprKind::If(cond, ..) = &right.kind
+ {
+ Err(self.sess.create_err(errors::UnexpectedIfWithIf(
+ binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()),
+ )))
+ } else {
+ Ok(())
+ }
}
/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
@@ -2834,7 +2874,7 @@ impl<'a> Parser<'a> {
)?;
let guard = if this.eat_keyword(kw::If) {
let if_span = this.prev_token.span;
- let mut cond = this.parse_expr_res(Restrictions::ALLOW_LET, None)?;
+ let mut cond = this.parse_match_guard_condition()?;
CondChecker { parser: this, forbid_let_reason: None }.visit_expr(&mut cond);
@@ -2860,9 +2900,9 @@ impl<'a> Parser<'a> {
{
err.span_suggestion(
this.token.span,
- "try using a fat arrow here",
+ "use a fat arrow to start a match arm",
"=>",
- Applicability::MaybeIncorrect,
+ Applicability::MachineApplicable,
);
err.emit();
this.bump();
@@ -2911,9 +2951,9 @@ impl<'a> Parser<'a> {
.or_else(|mut err| {
if this.token == token::FatArrow {
if let Ok(expr_lines) = sm.span_to_lines(expr.span)
- && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
- && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
- && expr_lines.lines.len() == 2
+ && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
+ && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
+ && expr_lines.lines.len() == 2
{
// We check whether there's any trailing code in the parse span,
// if there isn't, we very likely have the following:
@@ -2979,6 +3019,33 @@ impl<'a> Parser<'a> {
})
}
+ fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
+ self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err(
+ |mut err| {
+ if self.prev_token == token::OpenDelim(Delimiter::Brace) {
+ let sugg_sp = self.prev_token.span.shrink_to_lo();
+ // Consume everything within the braces, let's avoid further parse
+ // errors.
+ self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
+ let msg = "you might have meant to start a match arm after the match guard";
+ if self.eat(&token::CloseDelim(Delimiter::Brace)) {
+ let applicability = if self.token.kind != token::FatArrow {
+ // We have high confidence that we indeed didn't have a struct
+ // literal in the match guard, but rather we had some operation
+ // that ended in a path, immediately followed by a block that was
+ // meant to be the match arm.
+ Applicability::MachineApplicable
+ } else {
+ Applicability::MaybeIncorrect
+ };
+ err.span_suggestion_verbose(sugg_sp, msg, "=> ".to_string(), applicability);
+ }
+ }
+ err
+ },
+ )
+ }
+
pub(crate) fn is_builtin(&self) -> bool {
self.token.is_keyword(kw::Builtin) && self.look_ahead(1, |t| *t == token::Pound)
}
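`parse_match_guard_condition` now recovers when a match guard runs straight into a block, suggesting the missing `=>`. A compiling example of the accepted form it points to (values are illustrative):

    fn main() {
        let v = Some(2);
        match v {
            // The recovery suggests inserting `=>` between the guard and the arm body.
            Some(n) if n > 1 => println!("{n}"),
            _ => {}
        }
    }
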
@@ -3015,18 +3082,24 @@ impl<'a> Parser<'a> {
&& self.token.uninterpolated_span().at_least_rust_2018()
}
- /// Parses an `async move? {...}` expression.
- fn parse_async_block(&mut self) -> PResult<'a, P<Expr>> {
+ /// Parses an `async move? {...}` or `gen move? {...}` expression.
+ fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
- self.expect_keyword(kw::Async)?;
+ let kind = if self.eat_keyword(kw::Async) {
+ GenBlockKind::Async
+ } else {
+ assert!(self.eat_keyword(kw::Gen));
+ self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.token.span));
+ GenBlockKind::Gen
+ };
let capture_clause = self.parse_capture_clause()?;
let (attrs, body) = self.parse_inner_attrs_and_block()?;
- let kind = ExprKind::Async(capture_clause, body);
+ let kind = ExprKind::Gen(capture_clause, body, kind);
Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
}
- fn is_async_block(&self) -> bool {
- self.token.is_keyword(kw::Async)
+ fn is_gen_block(&self, kw: Symbol) -> bool {
+ self.token.is_keyword(kw)
&& ((
// `async move {`
self.is_keyword_ahead(1, &[kw::Move])
@@ -3049,9 +3122,10 @@ impl<'a> Parser<'a> {
|| self.look_ahead(2, |t| t == &token::Colon)
&& (
// `{ ident: token, ` cannot start a block.
- self.look_ahead(4, |t| t == &token::Comma) ||
- // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`.
- self.look_ahead(3, |t| !t.can_begin_type())
+ self.look_ahead(4, |t| t == &token::Comma)
+ // `{ ident: ` cannot start a block unless it's a type ascription
+ // `ident: Type`.
+ || self.look_ahead(3, |t| !t.can_begin_type())
)
)
}
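The renamed `parse_gen_block`/`is_gen_block` pair above handles both `async move? { ... }` and, behind the `gen_blocks` gate on Rust 2024, `gen move? { ... }`. Only the `async` form is sketched here, since it needs no feature gate (illustrative code):

    async fn demo() -> i32 {
        // An async block is one of the two block kinds `parse_gen_block` accepts.
        let fut = async move { 40 + 2 };
        fut.await
    }

    fn main() {
        let _ = demo();
    }
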
@@ -3091,6 +3165,7 @@ impl<'a> Parser<'a> {
let mut fields = ThinVec::new();
let mut base = ast::StructRest::None;
let mut recover_async = false;
+ let in_if_guard = self.restrictions.contains(Restrictions::IN_IF_GUARD);
let mut async_block_err = |e: &mut Diagnostic, span: Span| {
recover_async = true;
@@ -3128,6 +3203,26 @@ impl<'a> Parser<'a> {
e.span_label(pth.span, "while parsing this struct");
}
+ if let Some((ident, _)) = self.token.ident()
+ && !self.token.is_reserved_ident()
+ && self.look_ahead(1, |t| {
+ AssocOp::from_token(&t).is_some()
+ || matches!(t.kind, token::OpenDelim(_))
+ || t.kind == token::Dot
+ })
+ {
+ // Looks like they tried to write a shorthand, complex expression.
+ e.span_suggestion_verbose(
+ self.token.span.shrink_to_lo(),
+ "try naming a field",
+ &format!("{ident}: ",),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ if in_if_guard && close_delim == Delimiter::Brace {
+ return Err(e);
+ }
+
if !recover {
return Err(e);
}
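The relocated "try naming a field" suggestion now fires while recovering a struct literal whose field position holds a complex expression instead of a shorthand. A compiling version of the suggested output (the struct and values are assumptions for illustration):

    struct Rect { width: u32 }

    fn main() {
        let width = 3;
        // Writing `Rect { width * 2 }` would trigger the suggestion `width: width * 2`.
        let r = Rect { width: width * 2 };
        assert_eq!(r.width, 6);
    }
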
@@ -3173,19 +3268,6 @@ impl<'a> Parser<'a> {
",",
Applicability::MachineApplicable,
);
- } else if is_shorthand
- && (AssocOp::from_token(&self.token).is_some()
- || matches!(&self.token.kind, token::OpenDelim(_))
- || self.token.kind == token::Dot)
- {
- // Looks like they tried to write a shorthand, complex expression.
- let ident = parsed_field.expect("is_shorthand implies Some").ident;
- e.span_suggestion(
- ident.span.shrink_to_lo(),
- "try naming a field",
- &format!("{ident}: "),
- Applicability::HasPlaceholders,
- );
}
}
if !recover {
@@ -3288,6 +3370,24 @@ impl<'a> Parser<'a> {
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
+ // Proactively check whether parsing the field will be incorrect.
+ let is_wrong = this.token.is_ident()
+ && !this.token.is_reserved_ident()
+ && !this.look_ahead(1, |t| {
+ t == &token::Colon
+ || t == &token::Eq
+ || t == &token::Comma
+ || t == &token::CloseDelim(Delimiter::Brace)
+ || t == &token::CloseDelim(Delimiter::Parenthesis)
+ });
+ if is_wrong {
+ return Err(errors::ExpectedStructField {
+ span: this.look_ahead(1, |t| t.span),
+ ident_span: this.token.span,
+ token: this.look_ahead(1, |t| t.clone()),
+ }
+ .into_diagnostic(&self.sess.span_diagnostic));
+ }
let (ident, expr) = if is_shorthand {
// Mimic `x: x` for the `x` field shorthand.
let ident = this.parse_ident_common(false)?;
@@ -3508,8 +3608,7 @@ impl MutVisitor for CondChecker<'_> {
noop_visit_expr(e, self);
self.forbid_let_reason = forbid_let_reason;
}
- ExprKind::Cast(ref mut op, _)
- | ExprKind::Type(ref mut op, _) => {
+ ExprKind::Cast(ref mut op, _) | ExprKind::Type(ref mut op, _) => {
let forbid_let_reason = self.forbid_let_reason;
self.forbid_let_reason = Some(OtherForbidden);
self.visit_expr(op);
@@ -3526,7 +3625,7 @@ impl MutVisitor for CondChecker<'_> {
| ExprKind::Match(_, _)
| ExprKind::Closure(_)
| ExprKind::Block(_, _)
- | ExprKind::Async(_, _)
+ | ExprKind::Gen(_, _, _)
| ExprKind::TryBlock(_)
| ExprKind::Underscore
| ExprKind::Path(_, _)
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index aad4edaba..801860c21 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -22,9 +22,9 @@ use rustc_errors::{
};
use rustc_span::edit_distance::edit_distance;
use rustc_span::edition::Edition;
-use rustc_span::source_map::{self, Span};
+use rustc_span::source_map;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::DUMMY_SP;
+use rustc_span::{Span, DUMMY_SP};
use std::fmt::Write;
use std::mem;
use thin_vec::{thin_vec, ThinVec};
@@ -122,7 +122,9 @@ impl<'a> Parser<'a> {
) -> PResult<'a, Option<Item>> {
// Don't use `maybe_whole` so that we have precise control
// over when we bump the parser
- if let token::Interpolated(nt) = &self.token.kind && let token::NtItem(item) = &**nt {
+ if let token::Interpolated(nt) = &self.token.kind
+ && let token::NtItem(item) = &**nt
+ {
let mut item = item.clone();
self.bump();
@@ -623,11 +625,10 @@ impl<'a> Parser<'a> {
// `impl<T: Default> impl Default for Wrapper<T>`
// ^^^^^
let extra_impl_kw = ty_first.span.until(bound.span());
- self.sess
- .emit_err(errors::ExtraImplKeywordInTraitImpl {
- extra_impl_kw,
- impl_trait_span: ty_first.span
- });
+ self.sess.emit_err(errors::ExtraImplKeywordInTraitImpl {
+ extra_impl_kw,
+ impl_trait_span: ty_first.span,
+ });
} else {
self.sess.emit_err(errors::ExpectedTraitInTraitImplFoundType {
span: ty_first.span,
@@ -813,7 +814,12 @@ impl<'a> Parser<'a> {
fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
let unsafety = self.parse_unsafety(Case::Sensitive);
// Parse optional `auto` prefix.
- let is_auto = if self.eat_keyword(kw::Auto) { IsAuto::Yes } else { IsAuto::No };
+ let is_auto = if self.eat_keyword(kw::Auto) {
+ self.sess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
+ IsAuto::Yes
+ } else {
+ IsAuto::No
+ };
self.expect_keyword(kw::Trait)?;
let ident = self.parse_ident()?;
@@ -1301,7 +1307,9 @@ impl<'a> Parser<'a> {
// Provide a nice error message if the user placed a where-clause before the item body.
// Users may be tempted to write such code if they are still used to the deprecated
// where-clause location on type aliases and associated types. See also #89122.
- if before_where_clause.has_where_token && let Some(expr) = &expr {
+ if before_where_clause.has_where_token
+ && let Some(expr) = &expr
+ {
self.sess.emit_err(errors::WhereClauseBeforeConstBody {
span: before_where_clause.span,
name: ident.span,
@@ -1944,7 +1952,8 @@ impl<'a> Parser<'a> {
let mut err = self.expected_ident_found_err();
if self.eat_keyword_noexpect(kw::Let)
&& let removal_span = self.prev_token.span.until(self.token.span)
- && let Ok(ident) = self.parse_ident_common(false)
+ && let Ok(ident) = self
+ .parse_ident_common(false)
// Cancel this error, we don't need it.
.map_err(|err| err.cancel())
&& self.token.kind == TokenKind::Colon
@@ -2269,6 +2278,18 @@ impl<'a> Parser<'a> {
err.span_label(ident.span, "while parsing this `fn`");
err.emit();
} else {
+ // check for typo'd Fn* trait bounds such as
+ // fn foo<F>() where F: FnOnce -> () {}
+ if self.token.kind == token::RArrow {
+ let machine_applicable = [sym::FnOnce, sym::FnMut, sym::Fn]
+ .into_iter()
+ .any(|s| self.prev_token.is_ident_named(s));
+
+ err.subdiagnostic(errors::FnTraitMissingParen {
+ span: self.prev_token.span,
+ machine_applicable,
+ });
+ }
return Err(err);
}
}
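The new subdiagnostic targets the typo shown in the hunk's comment, `F: FnOnce -> ()`, and suggests the missing parentheses. A compilable variant of the corrected bound (the parameter and body are illustrative additions):

    fn call<F>(f: F) where F: FnOnce() -> () {
        f();
    }

    fn main() {
        call(|| ());
    }
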
@@ -2288,9 +2309,9 @@ impl<'a> Parser<'a> {
// `pub` is added in case users got confused with the ordering like `async pub fn`,
// only if it wasn't preceded by `default` as `default pub` is invalid.
let quals: &[Symbol] = if check_pub {
- &[kw::Pub, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
+ &[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
} else {
- &[kw::Const, kw::Async, kw::Unsafe, kw::Extern]
+ &[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
};
self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
// `$qual fn` or `$qual $qual`:
@@ -2344,6 +2365,9 @@ impl<'a> Parser<'a> {
let async_start_sp = self.token.span;
let asyncness = self.parse_asyncness(case);
+ let _gen_start_sp = self.token.span;
+ let genness = self.parse_genness(case);
+
let unsafe_start_sp = self.token.span;
let unsafety = self.parse_unsafety(case);
@@ -2359,6 +2383,10 @@ impl<'a> Parser<'a> {
}
}
+ if let Gen::Yes { span, .. } = genness {
+ self.sess.emit_err(errors::GenFn { span });
+ }
+
if !self.eat_keyword_case(kw::Fn, case) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
@@ -2373,22 +2401,39 @@ impl<'a> Parser<'a> {
Misplaced(Span),
}
+ // We may be able to recover
+ let mut recover_constness = constness;
+ let mut recover_asyncness = asyncness;
+ let mut recover_unsafety = unsafety;
// This will allow the machine fix to directly place the keyword in the correct place or to indicate
// that the keyword is already present and the second instance should be removed.
let wrong_kw = if self.check_keyword(kw::Const) {
match constness {
Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
- Const::No => Some(WrongKw::Misplaced(async_start_sp)),
+ Const::No => {
+ recover_constness = Const::Yes(self.token.span);
+ Some(WrongKw::Misplaced(async_start_sp))
+ }
}
} else if self.check_keyword(kw::Async) {
match asyncness {
Async::Yes { span, .. } => Some(WrongKw::Duplicated(span)),
- Async::No => Some(WrongKw::Misplaced(unsafe_start_sp)),
+ Async::No => {
+ recover_asyncness = Async::Yes {
+ span: self.token.span,
+ closure_id: DUMMY_NODE_ID,
+ return_impl_trait_id: DUMMY_NODE_ID,
+ };
+ Some(WrongKw::Misplaced(unsafe_start_sp))
+ }
}
} else if self.check_keyword(kw::Unsafe) {
match unsafety {
Unsafe::Yes(sp) => Some(WrongKw::Duplicated(sp)),
- Unsafe::No => Some(WrongKw::Misplaced(ext_start_sp)),
+ Unsafe::No => {
+ recover_unsafety = Unsafe::Yes(self.token.span);
+ Some(WrongKw::Misplaced(ext_start_sp))
+ }
}
} else {
None
@@ -2458,6 +2503,23 @@ impl<'a> Parser<'a> {
}
}
}
+
+ if wrong_kw.is_some()
+ && self.may_recover()
+ && self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case))
+ {
+ // Advance past the misplaced keyword and `fn`
+ self.bump();
+ self.bump();
+ err.emit();
+ return Ok(FnHeader {
+ constness: recover_constness,
+ unsafety: recover_unsafety,
+ asyncness: recover_asyncness,
+ ext,
+ });
+ }
+
return Err(err);
}
}
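With the recovery above, a header with a swapped qualifier such as `unsafe const fn f() {}` is diagnosed and then parsed as if written in the accepted order, so item parsing can continue. The accepted ordering puts `const` before `unsafe` (example is illustrative):

    const unsafe fn f() -> i32 { 1 }

    fn main() {
        // Calling an `unsafe fn` still requires an `unsafe` block.
        let _ = unsafe { f() };
    }
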
@@ -2483,11 +2545,23 @@ impl<'a> Parser<'a> {
pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
let mut first_param = true;
// Parse the arguments, starting out with `self` being allowed...
+ if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
+ // might be typo'd trait impl, handled elsewhere
+ && !self.token.is_keyword(kw::For)
+ {
+ // recover from missing argument list, e.g. `fn main -> () {}`
+ self.sess
+ .emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() });
+ return Ok(ThinVec::new());
+ }
+
let (mut params, _) = self.parse_paren_comma_seq(|p| {
p.recover_diff_marker();
+ let snapshot = p.create_snapshot_for_diagnostic();
let param = p.parse_param_general(req_name, first_param).or_else(|mut e| {
e.emit();
let lo = p.prev_token.span;
+ p.restore_snapshot(snapshot);
// Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
// Create a placeholder argument for proper arg count (issue #34264).
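The new check recovers from a missing parameter list, using the hunk's own example `fn main -> () {}`: it reports `MissingFnParams` and carries on as if `()` were present. The well-formed spelling:

    // `fn main -> () {}` now gets a targeted error; this is the accepted form.
    fn main() -> () {}
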
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index e84d8f5b3..1a7ae4069 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -11,6 +11,7 @@ mod stmt;
mod ty;
use crate::lexer::UnmatchedDelim;
+use ast::Gen;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
@@ -35,8 +36,8 @@ use rustc_errors::{
Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
};
use rustc_session::parse::ParseSess;
-use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use rustc_span::{Span, DUMMY_SP};
use std::ops::Range;
use std::{mem, slice};
use thin_vec::ThinVec;
@@ -52,6 +53,7 @@ bitflags::bitflags! {
const NO_STRUCT_LITERAL = 1 << 1;
const CONST_EXPR = 1 << 2;
const ALLOW_LET = 1 << 3;
+ const IN_IF_GUARD = 1 << 4;
}
}
@@ -158,8 +160,9 @@ pub struct Parser<'a> {
/// appropriately.
///
/// See the comments in the `parse_path_segment` function for more details.
- unmatched_angle_bracket_count: u32,
- max_angle_bracket_count: u32,
+ unmatched_angle_bracket_count: u16,
+ max_angle_bracket_count: u16,
+ angle_bracket_nesting: u16,
last_unexpected_token_span: Option<Span>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
@@ -393,6 +396,7 @@ impl<'a> Parser<'a> {
break_last_token: false,
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
+ angle_bracket_nesting: 0,
last_unexpected_token_span: None,
subparser_name,
capture_state: CaptureState {
@@ -557,8 +561,9 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
- && let Some((ident, /* is_raw */ false)) = self.token.ident()
- && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
+ && let Some((ident, /* is_raw */ false)) = self.token.ident()
+ && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
+ {
true
} else {
false
@@ -586,12 +591,10 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
- && let Some((ident, /* is_raw */ false)) = self.token.ident()
- && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
- self.sess.emit_err(errors::KwBadCase {
- span: ident.span,
- kw: kw.as_str()
- });
+ && let Some((ident, /* is_raw */ false)) = self.token.ident()
+ && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
+ {
+ self.sess.emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
self.bump();
return true;
}
@@ -1126,6 +1129,16 @@ impl<'a> Parser<'a> {
}
}
+ /// Parses genness: `gen` or nothing.
+ fn parse_genness(&mut self, case: Case) -> Gen {
+ if self.token.span.at_least_rust_2024() && self.eat_keyword_case(kw::Gen, case) {
+ let span = self.prev_token.uninterpolated_span();
+ Gen::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
+ } else {
+ Gen::No
+ }
+ }
+
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self, case: Case) -> Unsafe {
if self.eat_keyword_case(kw::Unsafe, case) {
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index ff059a7e8..025b0615a 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -115,7 +115,7 @@ impl<'a> Parser<'a> {
Some(item) => NtItem(item),
None => {
return Err(UnexpectedNonterminal::Item(self.token.span)
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(&self.sess.span_diagnostic));
}
},
NonterminalKind::Block => {
@@ -127,7 +127,7 @@ impl<'a> Parser<'a> {
Some(s) => NtStmt(P(s)),
None => {
return Err(UnexpectedNonterminal::Statement(self.token.span)
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(&self.sess.span_diagnostic));
}
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
@@ -146,19 +146,15 @@ impl<'a> Parser<'a> {
NonterminalKind::Expr => NtExpr(self.parse_expr_force_collect()?),
NonterminalKind::Literal => {
// The `:literal` matcher does not support attributes
- NtLiteral(
- self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
- )
+ NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?)
}
- NonterminalKind::Ty => NtTy(
- self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
- ),
+ NonterminalKind::Ty => {
+ NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?)
+ }
// this could be handled like a token, since it is one
- NonterminalKind::Ident
- if let Some((ident, is_raw)) = get_macro_ident(&self.token) =>
- {
+ NonterminalKind::Ident if let Some((ident, is_raw)) = get_macro_ident(&self.token) => {
self.bump();
NtIdent(ident, is_raw)
}
@@ -166,15 +162,17 @@ impl<'a> Parser<'a> {
return Err(UnexpectedNonterminal::Ident {
span: self.token.span,
token: self.token.clone(),
- }.into_diagnostic(&self.sess.span_diagnostic));
+ }
+ .into_diagnostic(&self.sess.span_diagnostic));
+ }
+ NonterminalKind::Path => {
+ NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
}
- NonterminalKind::Path => NtPath(
- P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?),
- ),
NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(true)?)),
- NonterminalKind::Vis => NtVis(
- P(self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?),
- ),
+ NonterminalKind::Vis => {
+ NtVis(P(self
+ .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))
+ }
NonterminalKind::Lifetime => {
if self.check_lifetime() {
NtLifetime(self.expect_lifetime().ident)
@@ -182,7 +180,8 @@ impl<'a> Parser<'a> {
return Err(UnexpectedNonterminal::Lifetime {
span: self.token.span,
token: self.token.clone(),
- }.into_diagnostic(&self.sess.span_diagnostic));
+ }
+ .into_diagnostic(&self.sess.span_diagnostic));
}
}
};
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 3e4e92789..0a4c7c17d 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -20,8 +20,9 @@ use rustc_ast::{
use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult};
use rustc_session::errors::ExprParenthesesNeeded;
-use rustc_span::source_map::{respan, Span, Spanned};
+use rustc_span::source_map::{respan, Spanned};
use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::Span;
use thin_vec::{thin_vec, ThinVec};
#[derive(PartialEq, Copy, Clone)]
@@ -967,11 +968,12 @@ impl<'a> Parser<'a> {
// check that a comma comes after every field
if !ate_comma {
- let err = ExpectedCommaAfterPatternField { span: self.token.span }
+ let mut err = ExpectedCommaAfterPatternField { span: self.token.span }
.into_diagnostic(&self.sess.span_diagnostic);
if let Some(mut delayed) = delayed_err {
delayed.emit();
}
+ self.recover_misplaced_pattern_modifiers(&fields, &mut err);
return Err(err);
}
ate_comma = false;
@@ -1109,6 +1111,37 @@ impl<'a> Parser<'a> {
Ok((fields, etc))
}
+ /// If the user writes `S { ref field: name }` instead of `S { field: ref name }`, we suggest
+ /// the correct code.
+ fn recover_misplaced_pattern_modifiers(
+ &self,
+ fields: &ThinVec<PatField>,
+ err: &mut DiagnosticBuilder<'a, ErrorGuaranteed>,
+ ) {
+ if let Some(last) = fields.iter().last()
+ && last.is_shorthand
+ && let PatKind::Ident(binding, ident, None) = last.pat.kind
+ && binding != BindingAnnotation::NONE
+ && self.token == token::Colon
+ // We found `ref mut? ident:`, try to parse a `name,` or `name }`.
+ && let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span))
+ && self.look_ahead(2, |t| {
+ t == &token::Comma || t == &token::CloseDelim(Delimiter::Brace)
+ })
+ {
+ let span = last.pat.span.with_hi(ident.span.lo());
+ // We have `S { ref field: name }` instead of `S { field: ref name }`
+ err.multipart_suggestion(
+ "the pattern modifiers belong after the `:`",
+ vec![
+ (span, String::new()),
+ (name_span.shrink_to_lo(), binding.prefix_str().to_string()),
+ ],
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
/// Recover on `...` or `_` as if it were `..` to avoid further errors.
/// See issue #46718.
fn recover_bad_dot_dot(&self) {
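`recover_misplaced_pattern_modifiers` turns `S { ref field: name }` (from its doc comment) into a suggestion to move the binding mode after the `:`. The suggested form compiles; the struct definition is added for illustration:

    struct S { field: String }

    fn main() {
        let s = S { field: String::from("x") };
        // Suggested placement: the `ref` goes on the binding, after the `:`.
        let S { field: ref name } = s;
        assert_eq!(name, "x");
    }
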
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 445516c03..8626dbe40 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -10,8 +10,8 @@ use rustc_ast::{
Path, PathSegment, QSelf,
};
use rustc_errors::{Applicability, IntoDiagnostic, PResult};
-use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{BytePos, Span};
use std::mem;
use thin_vec::ThinVec;
use tracing::debug;
@@ -487,10 +487,24 @@ impl<'a> Parser<'a> {
// Take a snapshot before attempting to parse - we can restore this later.
let snapshot = is_first_invocation.then(|| self.clone());
+ self.angle_bracket_nesting += 1;
debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
match self.parse_angle_args(ty_generics) {
- Ok(args) => Ok(args),
+ Ok(args) => {
+ self.angle_bracket_nesting -= 1;
+ Ok(args)
+ }
+ Err(mut e) if self.angle_bracket_nesting > 10 => {
+ self.angle_bracket_nesting -= 1;
+ // When encountering severely malformed code where there are several levels of
+ // nested unclosed angle args (`f::<f::<f::<f::<...`), we avoid severe O(n^2)
+ // behavior by bailing out earlier (#117080).
+ e.emit();
+ rustc_errors::FatalError.raise();
+ }
Err(e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
+ self.angle_bracket_nesting -= 1;
+
// Swap `self` with our backup of the parser state before attempting to parse
// generic arguments.
let snapshot = mem::replace(self, snapshot.unwrap());
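The new `angle_bracket_nesting` counter bails out after more than 10 nested, unrecovered `<` levels (the `f::<f::<f::<...` pattern and #117080 cited above) to avoid quadratic re-parsing. Ordinary nested generic arguments are unaffected; a quick illustrative check:

    fn id<T>(x: T) -> T { x }

    fn main() {
        // Well-formed nesting still parses normally.
        let v = id::<Vec<Vec<i32>>>(vec![vec![1]]);
        assert_eq!(v[0][0], 1);
    }
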
@@ -520,8 +534,8 @@ impl<'a> Parser<'a> {
// Make a span over ${unmatched angle bracket count} characters.
// This is safe because `all_angle_brackets` ensures that there are only `<`s,
// i.e. no multibyte characters, in this range.
- let span =
- lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count));
+ let span = lo
+ .with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count.into()));
self.sess.emit_err(errors::UnmatchedAngle {
span,
plural: snapshot.unmatched_angle_bracket_count > 1,
@@ -531,7 +545,10 @@ impl<'a> Parser<'a> {
self.parse_angle_args(ty_generics)
}
}
- Err(e) => Err(e),
+ Err(e) => {
+ self.angle_bracket_nesting -= 1;
+ Err(e)
+ }
}
}
@@ -764,7 +781,8 @@ impl<'a> Parser<'a> {
if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind
&& let ast::TyKind::Err = inner_ty.kind
&& let Some(snapshot) = snapshot
- && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+ && let Some(expr) =
+ self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
{
return Ok(Some(self.dummy_const_arg_needs_braces(
self.struct_span_err(expr.span, "invalid const generic expression"),
@@ -776,12 +794,10 @@ impl<'a> Parser<'a> {
}
Err(err) => {
if let Some(snapshot) = snapshot
- && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+ && let Some(expr) =
+ self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
{
- return Ok(Some(self.dummy_const_arg_needs_braces(
- err,
- expr.span,
- )));
+ return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
}
// Try to recover from possible `const` arg without braces.
return self.recover_const_arg(start, err).map(Some);
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 12c267351..aa939a71d 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -20,8 +20,8 @@ use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyl
use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
use rustc_ast::{StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
-use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{BytePos, Span};
use std::borrow::Cow;
use std::mem;
@@ -52,7 +52,9 @@ impl<'a> Parser<'a> {
// Don't use `maybe_whole` so that we have precise control
// over when we bump the parser
- if let token::Interpolated(nt) = &self.token.kind && let token::NtStmt(stmt) = &**nt {
+ if let token::Interpolated(nt) = &self.token.kind
+ && let token::NtStmt(stmt) = &**nt
+ {
let mut stmt = stmt.clone();
self.bump();
stmt.visit_attrs(|stmt_attrs| {
@@ -227,8 +229,9 @@ impl<'a> Parser<'a> {
/// Also error if the previous token was a doc comment.
fn error_outer_attrs(&self, attrs: AttrWrapper) {
if !attrs.is_empty()
- && let attrs = attrs.take_for_recovery(self.sess)
- && let attrs @ [.., last] = &*attrs {
+ && let attrs = attrs.take_for_recovery(self.sess)
+ && let attrs @ [.., last] = &*attrs
+ {
if last.is_doc_comment() {
self.sess.emit_err(errors::DocCommentDoesNotDocumentAnything {
span: last.span,
@@ -616,34 +619,19 @@ impl<'a> Parser<'a> {
match &mut stmt.kind {
// Expression without semicolon.
StmtKind::Expr(expr)
- if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
+ if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
+ {
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
- let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
+ let expect_result =
+ self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
let replace_with_err = 'break_recover: {
match expect_result {
- // Recover from parser, skip type error to avoid extra errors.
+ // Recover from parser, skip type error to avoid extra errors.
Ok(true) => true,
Err(mut e) => {
- if let TokenKind::DocComment(..) = self.token.kind
- && let Ok(snippet) = self.span_to_snippet(self.token.span)
- {
- let sp = self.token.span;
- let marker = &snippet[..3];
- let (comment_marker, doc_comment_marker) = marker.split_at(2);
-
- e.span_suggestion(
- sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
- format!(
- "add a space before `{doc_comment_marker}` to use a regular comment",
- ),
- format!("{comment_marker} {doc_comment_marker}"),
- Applicability::MaybeIncorrect,
- );
- }
-
if self.recover_colon_as_semi() {
// recover_colon_as_semi has already emitted a nicer error.
e.delay_as_bug();
@@ -654,14 +642,19 @@ impl<'a> Parser<'a> {
}
match &expr.kind {
- ExprKind::Path(None, ast::Path { segments, .. }) if segments.len() == 1 => {
+ ExprKind::Path(None, ast::Path { segments, .. })
+ if segments.len() == 1 =>
+ {
if self.token == token::Colon
&& self.look_ahead(1, |token| {
- token.is_whole_block() || matches!(
- token.kind,
- token::Ident(kw::For | kw::Loop | kw::While, false)
- | token::OpenDelim(Delimiter::Brace)
- )
+ token.is_whole_block()
+ || matches!(
+ token.kind,
+ token::Ident(
+ kw::For | kw::Loop | kw::While,
+ false
+ ) | token::OpenDelim(Delimiter::Brace)
+ )
})
{
let snapshot = self.create_snapshot_for_diagnostic();
@@ -702,9 +695,8 @@ impl<'a> Parser<'a> {
}
true
-
}
- Ok(false) => false
+ Ok(false) => false,
}
};
@@ -727,7 +719,9 @@ impl<'a> Parser<'a> {
}
eat_semi = false;
}
- StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => eat_semi = false,
+ StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => {
+ eat_semi = false
+ }
}
if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index a25b0f1f8..be2cbaf30 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -18,9 +18,8 @@ use rustc_ast::{
TraitObjectSyntax, Ty, TyKind,
};
use rustc_errors::{Applicability, PResult};
-use rustc_span::source_map::Span;
use rustc_span::symbol::{kw, sym, Ident};
-use rustc_span::Symbol;
+use rustc_span::{Span, Symbol};
use thin_vec::{thin_vec, ThinVec};
/// Any `?`, `!`, or `~const` modifiers that appear at the start of a bound.
@@ -247,7 +246,7 @@ impl<'a> Parser<'a> {
)?;
FnRetTy::Ty(ty)
} else {
- FnRetTy::Default(self.token.span.shrink_to_lo())
+ FnRetTy::Default(self.prev_token.span.shrink_to_hi())
})
}
@@ -893,13 +892,15 @@ impl<'a> Parser<'a> {
// to recover from errors, not make more).
let path = if self.may_recover() {
let (span, message, sugg, path, applicability) = match &ty.kind {
- TyKind::Ptr(..) | TyKind::Ref(..) if let TyKind::Path(_, path) = &ty.peel_refs().kind => {
+ TyKind::Ptr(..) | TyKind::Ref(..)
+ if let TyKind::Path(_, path) = &ty.peel_refs().kind =>
+ {
(
ty.span.until(path.span),
"consider removing the indirection",
"",
path,
- Applicability::MaybeIncorrect
+ Applicability::MaybeIncorrect,
)
}
TyKind::ImplTrait(_, bounds)
@@ -910,10 +911,10 @@ impl<'a> Parser<'a> {
"use the trait bounds directly",
"",
&tr.trait_ref.path,
- Applicability::MachineApplicable
+ Applicability::MachineApplicable,
)
}
- _ => return Err(err)
+ _ => return Err(err),
};
err.span_suggestion_verbose(span, message, sugg, applicability);
@@ -1027,7 +1028,8 @@ impl<'a> Parser<'a> {
args.into_iter()
.filter_map(|arg| {
if let ast::AngleBracketedArg::Arg(generic_arg) = arg
- && let ast::GenericArg::Lifetime(lifetime) = generic_arg {
+ && let ast::GenericArg::Lifetime(lifetime) = generic_arg
+ {
Some(lifetime)
} else {
None