summary refs log tree commit diff stats
path: root/compiler/rustc_parse/src
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                            101
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                          28
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                  171
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs     90
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs                 3
-rw-r--r--  compiler/rustc_parse/src/lib.rs                                20
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                         9
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs                20
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs                343
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                       614
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                     2
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                       153
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                        138
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs                 25
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                         62
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                         6
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                        59
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                          51
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs                      69
19 files changed, 1293 insertions, 671 deletions
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 8ab1ec298..008adcc83 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -2,7 +2,7 @@ use std::borrow::Cow;
use rustc_ast::token::Token;
use rustc_ast::{Path, Visibility};
-use rustc_errors::{AddToDiagnostic, Applicability, EmissionGuarantee, IntoDiagnostic};
+use rustc_errors::{AddToDiagnostic, Applicability, ErrorGuaranteed, IntoDiagnostic};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::edition::{Edition, LATEST_STABLE_EDITION};
@@ -138,6 +138,14 @@ pub(crate) enum InvalidVariableDeclarationSub {
}
#[derive(Diagnostic)]
+#[diag(parse_switch_ref_box_order)]
+pub(crate) struct SwitchRefBoxOrder {
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = "box ref")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
#[diag(parse_invalid_comparison_operator)]
pub(crate) struct InvalidComparisonOperator {
#[primary_span]
@@ -407,6 +415,32 @@ pub(crate) struct ExpectedExpressionFoundLet {
pub span: Span,
#[subdiagnostic]
pub reason: ForbiddenLetReason,
+ #[subdiagnostic]
+ pub missing_let: Option<MaybeMissingLet>,
+ #[subdiagnostic]
+ pub comparison: Option<MaybeComparison>,
+}
+
+#[derive(Subdiagnostic, Clone, Copy)]
+#[multipart_suggestion(
+ parse_maybe_missing_let,
+ applicability = "maybe-incorrect",
+ style = "verbose"
+)]
+pub(crate) struct MaybeMissingLet {
+ #[suggestion_part(code = "let ")]
+ pub span: Span,
+}
+
+#[derive(Subdiagnostic, Clone, Copy)]
+#[multipart_suggestion(
+ parse_maybe_comparison,
+ applicability = "maybe-incorrect",
+ style = "verbose"
+)]
+pub(crate) struct MaybeComparison {
+ #[suggestion_part(code = "=")]
+ pub span: Span,
}
#[derive(Diagnostic)]
@@ -1004,15 +1038,15 @@ pub(crate) struct ExpectedIdentifier {
pub help_cannot_start_number: Option<HelpIdentifierStartsWithNumber>,
}
-impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
+impl<'a> IntoDiagnostic<'a> for ExpectedIdentifier {
#[track_caller]
fn into_diagnostic(
self,
- handler: &'a rustc_errors::Handler,
- ) -> rustc_errors::DiagnosticBuilder<'a, G> {
+ dcx: &'a rustc_errors::DiagCtxt,
+ ) -> rustc_errors::DiagnosticBuilder<'a, ErrorGuaranteed> {
let token_descr = TokenDescription::from_token(&self.token);
- let mut diag = handler.struct_diagnostic(match token_descr {
+ let mut diag = dcx.struct_err(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
fluent::parse_expected_identifier_found_reserved_identifier_str
}
@@ -1061,15 +1095,15 @@ pub(crate) struct ExpectedSemi {
pub sugg: ExpectedSemiSugg,
}
-impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
+impl<'a> IntoDiagnostic<'a> for ExpectedSemi {
#[track_caller]
fn into_diagnostic(
self,
- handler: &'a rustc_errors::Handler,
- ) -> rustc_errors::DiagnosticBuilder<'a, G> {
+ dcx: &'a rustc_errors::DiagCtxt,
+ ) -> rustc_errors::DiagnosticBuilder<'a, ErrorGuaranteed> {
let token_descr = TokenDescription::from_token(&self.token);
- let mut diag = handler.struct_diagnostic(match token_descr {
+ let mut diag = dcx.struct_err(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
fluent::parse_expected_semi_found_reserved_identifier_str
}
@@ -1241,12 +1275,28 @@ pub(crate) struct ParenthesesInForHead {
#[derive(Subdiagnostic)]
#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct ParenthesesInForHeadSugg {
- #[suggestion_part(code = "{left_snippet}")]
+ #[suggestion_part(code = " ")]
pub left: Span,
- pub left_snippet: String,
- #[suggestion_part(code = "{right_snippet}")]
+ #[suggestion_part(code = " ")]
+ pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_parentheses_in_match_arm_pattern)]
+pub(crate) struct ParenthesesInMatchPat {
+ #[primary_span]
+ pub span: Vec<Span>,
+ #[subdiagnostic]
+ pub sugg: ParenthesesInMatchPatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ParenthesesInMatchPatSugg {
+ #[suggestion_part(code = "")]
+ pub left: Span,
+ #[suggestion_part(code = "")]
pub right: Span,
- pub right_snippet: String,
}
#[derive(Diagnostic)]
@@ -1676,7 +1726,7 @@ pub(crate) struct ExternItemCannotBeConst {
#[primary_span]
pub ident_span: Span,
#[suggestion(code = "static ", applicability = "machine-applicable")]
- pub const_span: Span,
+ pub const_span: Option<Span>,
}
#[derive(Diagnostic)]
@@ -2278,9 +2328,8 @@ pub(crate) enum InvalidMutInPattern {
#[note(parse_note_mut_pattern_usage)]
NonIdent {
#[primary_span]
- #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+ #[suggestion(code = "", applicability = "machine-applicable")]
span: Span,
- pat: String,
},
}
@@ -2828,3 +2877,23 @@ pub(crate) struct GenericArgsInPatRequireTurbofishSyntax {
)]
pub suggest_turbofish: Span,
}
+
+#[derive(Diagnostic)]
+#[diag(parse_transpose_dyn_or_impl)]
+pub(crate) struct TransposeDynOrImpl<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub kw: &'a str,
+ #[subdiagnostic]
+ pub sugg: TransposeDynOrImplSugg<'a>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct TransposeDynOrImplSugg<'a> {
+ #[suggestion_part(code = "")]
+ pub removal_span: Span,
+ #[suggestion_part(code = "{kw} ")]
+ pub insertion_span: Span,
+ pub kw: &'a str,
+}
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index f2eed5c9b..a91fbdff4 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -67,7 +67,7 @@ pub(crate) fn parse_token_trees<'a>(
let (stream, res, unmatched_delims) =
tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
match res {
- Ok(()) if unmatched_delims.is_empty() => Ok(stream),
+ Ok(_open_spacing) if unmatched_delims.is_empty() => Ok(stream),
_ => {
// Return error if there are unmatched delimiters or unclosed delimiters.
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
@@ -75,7 +75,7 @@ pub(crate) fn parse_token_trees<'a>(
let mut buffer = Vec::with_capacity(1);
for unmatched in unmatched_delims {
- if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
+ if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
err.buffer(&mut buffer);
}
}
@@ -230,7 +230,7 @@ impl<'a> StringReader<'a> {
let string = self.str_from(suffix_start);
if string == "_" {
self.sess
- .span_diagnostic
+ .dcx
.emit_err(errors::UnderscoreLiteralSuffix { span: self.mk_sp(suffix_start, self.pos) });
None
} else {
@@ -349,7 +349,7 @@ impl<'a> StringReader<'a> {
c: char,
) -> DiagnosticBuilder<'a, !> {
self.sess
- .span_diagnostic
+ .dcx
.struct_span_fatal(self.mk_sp(from_pos, to_pos), format!("{}: {}", m, escaped_char(c)))
}
@@ -362,7 +362,7 @@ impl<'a> StringReader<'a> {
if contains_text_flow_control_chars(content) {
let span = self.mk_sp(start, self.pos);
self.sess.buffer_lint_with_diagnostic(
- &TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
+ TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
span,
ast::CRATE_NODE_ID,
"unicode codepoint changing visible direction of text present in comment",
@@ -406,7 +406,7 @@ impl<'a> StringReader<'a> {
match kind {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
- self.sess.span_diagnostic.span_fatal_with_code(
+ self.sess.dcx.span_fatal_with_code(
self.mk_sp(start, end),
"unterminated character literal",
error_code!(E0762),
@@ -416,7 +416,7 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
- self.sess.span_diagnostic.span_fatal_with_code(
+ self.sess.dcx.span_fatal_with_code(
self.mk_sp(start + BytePos(1), end),
"unterminated byte constant",
error_code!(E0763),
@@ -426,7 +426,7 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
- self.sess.span_diagnostic.span_fatal_with_code(
+ self.sess.dcx.span_fatal_with_code(
self.mk_sp(start, end),
"unterminated double quote string",
error_code!(E0765),
@@ -436,7 +436,7 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
- self.sess.span_diagnostic.span_fatal_with_code(
+ self.sess.dcx.span_fatal_with_code(
self.mk_sp(start + BytePos(1), end),
"unterminated double quote byte string",
error_code!(E0766),
@@ -446,7 +446,7 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
- self.sess.span_diagnostic.span_fatal_with_code(
+ self.sess.dcx.span_fatal_with_code(
self.mk_sp(start + BytePos(1), end),
"unterminated C string",
error_code!(E0767),
@@ -581,7 +581,7 @@ impl<'a> StringReader<'a> {
possible_offset: Option<u32>,
found_terminators: u32,
) -> ! {
- let mut err = self.sess.span_diagnostic.struct_span_fatal_with_code(
+ let mut err = self.sess.dcx.struct_span_fatal_with_code(
self.mk_sp(start, start),
"unterminated raw string",
error_code!(E0748),
@@ -617,7 +617,7 @@ impl<'a> StringReader<'a> {
None => "unterminated block comment",
};
let last_bpos = self.pos;
- let mut err = self.sess.span_diagnostic.struct_span_fatal_with_code(
+ let mut err = self.sess.dcx.struct_span_fatal_with_code(
self.mk_sp(start, last_bpos),
msg,
error_code!(E0758),
@@ -683,7 +683,7 @@ impl<'a> StringReader<'a> {
} else {
// Before Rust 2021, only emit a lint for migration.
self.sess.buffer_lint_with_diagnostic(
- &RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
+ RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
prefix_span,
ast::CRATE_NODE_ID,
format!("prefix `{prefix}` is unknown"),
@@ -722,7 +722,7 @@ impl<'a> StringReader<'a> {
has_fatal_err = true;
}
emit_unescape_error(
- &self.sess.span_diagnostic,
+ &self.sess.dcx,
lit_content,
span_with_quotes,
span,
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index 31d91fe80..2bc2789a4 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -3,9 +3,10 @@ use super::diagnostics::same_indentation_level;
use super::diagnostics::TokenTreeDiagInfo;
use super::{StringReader, UnmatchedDelim};
use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast_pretty::pprust::token_to_string;
-use rustc_errors::PErr;
+use rustc_errors::{Applicability, PErr};
+use rustc_span::symbol::kw;
pub(super) struct TokenTreesReader<'a> {
string_reader: StringReader<'a>,
@@ -24,54 +25,46 @@ impl<'a> TokenTreesReader<'a> {
token: Token::dummy(),
diag_info: TokenTreeDiagInfo::default(),
};
- let (stream, res) = tt_reader.parse_token_trees(/* is_delimited */ false);
+ let (_open_spacing, stream, res) =
+ tt_reader.parse_token_trees(/* is_delimited */ false);
(stream, res, tt_reader.diag_info.unmatched_delims)
}
- // Parse a stream of tokens into a list of `TokenTree`s.
+ // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
+ // the result is that of the opening delimiter.
fn parse_token_trees(
&mut self,
is_delimited: bool,
- ) -> (TokenStream, Result<(), Vec<PErr<'a>>>) {
- self.token = self.string_reader.next_token().0;
+ ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
+ // Move past the opening delimiter.
+ let (_, open_spacing) = self.bump(false);
+
let mut buf = Vec::new();
loop {
match self.token.kind {
token::OpenDelim(delim) => {
buf.push(match self.parse_token_tree_open_delim(delim) {
Ok(val) => val,
- Err(errs) => return (TokenStream::new(buf), Err(errs)),
+ Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
})
}
token::CloseDelim(delim) => {
return (
+ open_spacing,
TokenStream::new(buf),
if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
);
}
token::Eof => {
return (
+ open_spacing,
TokenStream::new(buf),
if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
);
}
_ => {
- // Get the next normal token. This might require getting multiple adjacent
- // single-char tokens and joining them together.
- let (this_spacing, next_tok) = loop {
- let (next_tok, is_next_tok_preceded_by_whitespace) =
- self.string_reader.next_token();
- if is_next_tok_preceded_by_whitespace {
- break (Spacing::Alone, next_tok);
- } else if let Some(glued) = self.token.glue(&next_tok) {
- self.token = glued;
- } else {
- let this_spacing =
- if next_tok.is_punct() { Spacing::Joint } else { Spacing::Alone };
- break (this_spacing, next_tok);
- }
- };
- let this_tok = std::mem::replace(&mut self.token, next_tok);
+ // Get the next normal token.
+ let (this_tok, this_spacing) = self.bump(true);
buf.push(TokenTree::Token(this_tok, this_spacing));
}
}
@@ -80,7 +73,7 @@ impl<'a> TokenTreesReader<'a> {
fn eof_err(&mut self) -> PErr<'a> {
let msg = "this file contains an unclosed delimiter";
- let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
+ let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
for &(_, sp) in &self.diag_info.open_braces {
err.span_label(sp, "unclosed delimiter");
self.diag_info.unmatched_delims.push(UnmatchedDelim {
@@ -96,7 +89,7 @@ impl<'a> TokenTreesReader<'a> {
report_suspicious_mismatch_block(
&mut err,
&self.diag_info,
- &self.string_reader.sess.source_map(),
+ self.string_reader.sess.source_map(),
*delim,
)
}
@@ -115,32 +108,16 @@ impl<'a> TokenTreesReader<'a> {
// Parse the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
// uses an incorrect delimiter.
- let (tts, res) = self.parse_token_trees(/* is_delimited */ true);
- if let Err(mut errs) = res {
- // If there are unclosed delims, see if there are diff markers and if so, point them
- // out instead of complaining about the unclosed delims.
- let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
- let mut diff_errs = vec![];
- while parser.token != token::Eof {
- if let Err(diff_err) = parser.err_diff_marker() {
- diff_errs.push(diff_err);
- }
- parser.bump();
- }
- if !diff_errs.is_empty() {
- errs.iter_mut().for_each(|err| {
- err.delay_as_bug();
- });
- return Err(diff_errs);
- }
- return Err(errs);
+ let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
+ if let Err(errs) = res {
+ return Err(self.unclosed_delim_err(tts, errs));
}
// Expand to cover the entire delimited token tree
let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
let sm = self.string_reader.sess.source_map();
- match self.token.kind {
+ let close_spacing = match self.token.kind {
// Correct delimiter.
token::CloseDelim(close_delim) if close_delim == open_delim => {
let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
@@ -162,7 +139,7 @@ impl<'a> TokenTreesReader<'a> {
}
// Move past the closing delimiter.
- self.token = self.string_reader.next_token().0;
+ self.bump(false).1
}
// Incorrect delimiter.
token::CloseDelim(close_delim) => {
@@ -179,7 +156,7 @@ impl<'a> TokenTreesReader<'a> {
unclosed_delimiter = Some(sp);
};
for (brace, brace_span) in &self.diag_info.open_braces {
- if same_indentation_level(&sm, self.token.span, *brace_span)
+ if same_indentation_level(sm, self.token.span, *brace_span)
&& brace == &close_delim
{
// high likelihood of these two corresponding
@@ -206,18 +183,106 @@ impl<'a> TokenTreesReader<'a> {
// bar(baz(
// } // Incorrect delimiter but matches the earlier `{`
if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
- self.token = self.string_reader.next_token().0;
+ self.bump(false).1
+ } else {
+ // The choice of value here doesn't matter.
+ Spacing::Alone
}
}
token::Eof => {
// Silently recover, the EOF token will be seen again
// and an error emitted then. Thus we don't pop from
- // self.open_braces here.
+ // self.open_braces here. The choice of spacing value here
+ // doesn't matter.
+ Spacing::Alone
}
_ => unreachable!(),
- }
+ };
+
+ let spacing = DelimSpacing::new(open_spacing, close_spacing);
+
+ Ok(TokenTree::Delimited(delim_span, spacing, open_delim, tts))
+ }
+
+ // Move on to the next token, returning the current token and its spacing.
+ // Will glue adjacent single-char tokens together if `glue` is set.
+ fn bump(&mut self, glue: bool) -> (Token, Spacing) {
+ let (this_spacing, next_tok) = loop {
+ let (next_tok, is_next_tok_preceded_by_whitespace) = self.string_reader.next_token();
- Ok(TokenTree::Delimited(delim_span, open_delim, tts))
+ if is_next_tok_preceded_by_whitespace {
+ break (Spacing::Alone, next_tok);
+ } else if glue && let Some(glued) = self.token.glue(&next_tok) {
+ self.token = glued;
+ } else {
+ let this_spacing = if next_tok.is_punct() {
+ Spacing::Joint
+ } else if next_tok.kind == token::Eof {
+ Spacing::Alone
+ } else {
+ Spacing::JointHidden
+ };
+ break (this_spacing, next_tok);
+ }
+ };
+ let this_tok = std::mem::replace(&mut self.token, next_tok);
+ (this_tok, this_spacing)
+ }
+
+ fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
+ // If there are unclosed delims, see if there are diff markers and if so, point them
+ // out instead of complaining about the unclosed delims.
+ let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
+ let mut diff_errs = vec![];
+ // Suggest removing a `{` we think appears in an `if`/`while` condition
+ // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
+ // we have no way of tracking this in the lexer itself, so we piggyback on the parser
+ let mut in_cond = false;
+ while parser.token != token::Eof {
+ if let Err(diff_err) = parser.err_diff_marker() {
+ diff_errs.push(diff_err);
+ } else if parser.is_keyword_ahead(0, &[kw::If, kw::While]) {
+ in_cond = true;
+ } else if matches!(
+ parser.token.kind,
+ token::CloseDelim(Delimiter::Brace) | token::FatArrow
+ ) {
+ // end of the `if`/`while` body, or the end of a `match` guard
+ in_cond = false;
+ } else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
+ // Store the `&&` and `let` to use their spans later when creating the diagnostic
+ let maybe_andand = parser.look_ahead(1, |t| t.clone());
+ let maybe_let = parser.look_ahead(2, |t| t.clone());
+ if maybe_andand == token::OpenDelim(Delimiter::Brace) {
+ // This might be the beginning of the `if`/`while` body (i.e., the end of the condition)
+ in_cond = false;
+ } else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
+ let mut err = parser.struct_span_err(
+ parser.token.span,
+ "found a `{` in the middle of a let-chain",
+ );
+ err.span_suggestion(
+ parser.token.span,
+ "consider removing this brace to parse the `let` as part of the same chain",
+ "",
+ Applicability::MachineApplicable,
+ );
+ err.span_label(
+ maybe_andand.span.to(maybe_let.span),
+ "you might have meant to continue the let-chain here",
+ );
+ errs.push(err);
+ }
+ }
+ parser.bump();
+ }
+ if !diff_errs.is_empty() {
+ errs.iter_mut().for_each(|err| {
+ err.delay_as_bug();
+ });
+ return diff_errs;
+ }
+ return errs;
}
fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
@@ -225,12 +290,12 @@ impl<'a> TokenTreesReader<'a> {
// matching opening delimiter).
let token_str = token_to_string(&self.token);
let msg = format!("unexpected closing delimiter: `{token_str}`");
- let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
+ let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
report_suspicious_mismatch_block(
&mut err,
&self.diag_info,
- &self.string_reader.sess.source_map(),
+ self.string_reader.sess.source_map(),
delim,
);
err.span_label(self.token.span, "unexpected closing delimiter");
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index b659c40b2..775082adb 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -3,20 +3,20 @@
use std::iter::once;
use std::ops::Range;
-use rustc_errors::{Applicability, Handler};
+use rustc_errors::{Applicability, DiagCtxt};
use rustc_lexer::unescape::{EscapeError, Mode};
use rustc_span::{BytePos, Span};
use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError};
pub(crate) fn emit_unescape_error(
- handler: &Handler,
- // interior part of the literal, without quotes
+ dcx: &DiagCtxt,
+ // interior part of the literal, between quotes
lit: &str,
- // full span of the literal, including quotes
- span_with_quotes: Span,
- // interior span of the literal, without quotes
- span: Span,
+ // full span of the literal, including quotes and any prefix
+ full_lit_span: Span,
+ // span of the error part of the literal
+ err_span: Span,
mode: Mode,
// range of the error inside `lit`
range: Range<usize>,
@@ -24,19 +24,19 @@ pub(crate) fn emit_unescape_error(
) {
debug!(
"emit_unescape_error: {:?}, {:?}, {:?}, {:?}, {:?}",
- lit, span_with_quotes, mode, range, error
+ lit, full_lit_span, mode, range, error
);
let last_char = || {
let c = lit[range.clone()].chars().next_back().unwrap();
- let span = span.with_lo(span.hi() - BytePos(c.len_utf8() as u32));
+ let span = err_span.with_lo(err_span.hi() - BytePos(c.len_utf8() as u32));
(c, span)
};
match error {
EscapeError::LoneSurrogateUnicodeEscape => {
- handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: true });
+ dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true });
}
EscapeError::OutOfRangeUnicodeEscape => {
- handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: false });
+ dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false });
}
EscapeError::MoreThanOneChar => {
use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};
@@ -49,12 +49,16 @@ pub(crate) fn emit_unescape_error(
let normalized = lit.nfc().to_string();
if normalized.chars().count() == 1 {
let ch = normalized.chars().next().unwrap().escape_default().to_string();
- sugg = Some(MoreThanOneCharSugg::NormalizedForm { span, ch, normalized });
+ sugg = Some(MoreThanOneCharSugg::NormalizedForm {
+ span: err_span,
+ ch,
+ normalized,
+ });
}
let escaped_marks =
rest.iter().map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
note = Some(MoreThanOneCharNote::AllCombining {
- span,
+ span: err_span,
chr: format!("{first}"),
len: escaped_marks.len(),
escaped_marks: escaped_marks.join(""),
@@ -69,10 +73,12 @@ pub(crate) fn emit_unescape_error(
.collect();
if let &[ch] = printable.as_slice() {
- sugg =
- Some(MoreThanOneCharSugg::RemoveNonPrinting { span, ch: ch.to_string() });
+ sugg = Some(MoreThanOneCharSugg::RemoveNonPrinting {
+ span: err_span,
+ ch: ch.to_string(),
+ });
note = Some(MoreThanOneCharNote::NonPrinting {
- span,
+ span: err_span,
escaped: lit.escape_default().to_string(),
});
}
@@ -91,21 +97,21 @@ pub(crate) fn emit_unescape_error(
}
let sugg = format!("{prefix}\"{escaped}\"");
MoreThanOneCharSugg::Quotes {
- span: span_with_quotes,
+ span: full_lit_span,
is_byte: mode == Mode::Byte,
sugg,
}
});
- handler.emit_err(UnescapeError::MoreThanOneChar {
- span: span_with_quotes,
+ dcx.emit_err(UnescapeError::MoreThanOneChar {
+ span: full_lit_span,
note,
suggestion: sugg,
});
}
EscapeError::EscapeOnlyChar => {
let (c, char_span) = last_char();
- handler.emit_err(UnescapeError::EscapeOnlyChar {
- span,
+ dcx.emit_err(UnescapeError::EscapeOnlyChar {
+ span: err_span,
char_span,
escaped_sugg: c.escape_default().to_string(),
escaped_msg: escaped_char(c),
@@ -114,11 +120,11 @@ pub(crate) fn emit_unescape_error(
}
EscapeError::BareCarriageReturn => {
let double_quotes = mode.in_double_quotes();
- handler.emit_err(UnescapeError::BareCr { span, double_quotes });
+ dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes });
}
EscapeError::BareCarriageReturnInRawString => {
assert!(mode.in_double_quotes());
- handler.emit_err(UnescapeError::BareCrRawString(span));
+ dcx.emit_err(UnescapeError::BareCrRawString(err_span));
}
EscapeError::InvalidEscape => {
let (c, span) = last_char();
@@ -129,7 +135,7 @@ pub(crate) fn emit_unescape_error(
"unknown character escape"
};
let ec = escaped_char(c);
- let mut diag = handler.struct_span_err(span, format!("{label}: `{ec}`"));
+ let mut diag = dcx.struct_span_err(span, format!("{label}: `{ec}`"));
diag.span_label(span, label);
if c == '{' || c == '}' && matches!(mode, Mode::Str | Mode::RawStr) {
diag.help(
@@ -143,7 +149,7 @@ pub(crate) fn emit_unescape_error(
} else {
if mode == Mode::Str || mode == Mode::Char {
diag.span_suggestion(
- span_with_quotes,
+ full_lit_span,
"if you meant to write a literal backslash (perhaps escaping in a regular expression), consider a raw string literal",
format!("r\"{lit}\""),
Applicability::MaybeIncorrect,
@@ -158,13 +164,13 @@ pub(crate) fn emit_unescape_error(
diag.emit();
}
EscapeError::TooShortHexEscape => {
- handler.emit_err(UnescapeError::TooShortHexEscape(span));
+ dcx.emit_err(UnescapeError::TooShortHexEscape(err_span));
}
EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
let (c, span) = last_char();
let is_hex = error == EscapeError::InvalidCharInHexEscape;
let ch = escaped_char(c);
- handler.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
+ dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
}
EscapeError::NonAsciiCharInByte => {
let (c, span) = last_char();
@@ -174,7 +180,7 @@ pub(crate) fn emit_unescape_error(
Mode::RawByteStr => "raw byte string literal",
_ => panic!("non-is_byte literal paired with NonAsciiCharInByte"),
};
- let mut err = handler.struct_span_err(span, format!("non-ASCII character in {desc}"));
+ let mut err = dcx.struct_span_err(span, format!("non-ASCII character in {desc}"));
let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
format!(" but is {c:?}")
} else {
@@ -210,20 +216,20 @@ pub(crate) fn emit_unescape_error(
err.emit();
}
EscapeError::OutOfRangeHexEscape => {
- handler.emit_err(UnescapeError::OutOfRangeHexEscape(span));
+ dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span));
}
EscapeError::LeadingUnderscoreUnicodeEscape => {
let (c, span) = last_char();
- handler.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
+ dcx.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
span,
ch: escaped_char(c),
});
}
EscapeError::OverlongUnicodeEscape => {
- handler.emit_err(UnescapeError::OverlongUnicodeEscape(span));
+ dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span));
}
EscapeError::UnclosedUnicodeEscape => {
- handler.emit_err(UnescapeError::UnclosedUnicodeEscape(span, span.shrink_to_hi()));
+ dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()));
}
EscapeError::NoBraceInUnicodeEscape => {
let mut suggestion = "\\u{".to_owned();
@@ -238,34 +244,34 @@ pub(crate) fn emit_unescape_error(
let (label, sub) = if suggestion_len > 0 {
suggestion.push('}');
let hi = char_span.lo() + BytePos(suggestion_len as u32);
- (None, NoBraceUnicodeSub::Suggestion { span: span.with_hi(hi), suggestion })
+ (None, NoBraceUnicodeSub::Suggestion { span: err_span.with_hi(hi), suggestion })
} else {
- (Some(span), NoBraceUnicodeSub::Help)
+ (Some(err_span), NoBraceUnicodeSub::Help)
};
- handler.emit_err(UnescapeError::NoBraceInUnicodeEscape { span, label, sub });
+ dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub });
}
EscapeError::UnicodeEscapeInByte => {
- handler.emit_err(UnescapeError::UnicodeEscapeInByte(span));
+ dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span));
}
EscapeError::EmptyUnicodeEscape => {
- handler.emit_err(UnescapeError::EmptyUnicodeEscape(span));
+ dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span));
}
EscapeError::ZeroChars => {
- handler.emit_err(UnescapeError::ZeroChars(span));
+ dcx.emit_err(UnescapeError::ZeroChars(err_span));
}
EscapeError::LoneSlash => {
- handler.emit_err(UnescapeError::LoneSlash(span));
+ dcx.emit_err(UnescapeError::LoneSlash(err_span));
}
EscapeError::UnskippedWhitespaceWarning => {
let (c, char_span) = last_char();
- handler.emit_warning(UnescapeError::UnskippedWhitespace {
- span,
+ dcx.emit_warning(UnescapeError::UnskippedWhitespace {
+ span: err_span,
ch: escaped_char(c),
char_span,
});
}
EscapeError::MultipleSkippedLinesWarning => {
- handler.emit_warning(UnescapeError::MultipleSkippedLinesWarning(span));
+ dcx.emit_warning(UnescapeError::MultipleSkippedLinesWarning(err_span));
}
}
}
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index bbfb160eb..dac7569e3 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -350,8 +350,7 @@ pub(super) fn check_for_substitution(
let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
let msg = format!("substitution character not found for '{ch}'");
- reader.sess.span_diagnostic.span_bug_no_panic(span, msg);
- return (None, None);
+ reader.sess.dcx.span_bug(span, msg);
};
// special help suggestion for "directed" double quotes
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index c012a8663..82b0ff70c 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -20,8 +20,6 @@ use rustc_ast::{AttrItem, Attribute, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
-use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage};
-use rustc_fluent_macro::fluent_messages;
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};
@@ -37,7 +35,7 @@ pub mod validate_attr;
mod errors;
-fluent_messages! { "../messages.ftl" }
+rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
@@ -53,8 +51,8 @@ macro_rules! panictry_buffer {
match $e {
Ok(e) => e,
Err(errs) => {
- for mut e in errs {
- $handler.emit_diagnostic(&mut e);
+ for e in errs {
+ $handler.emit_diagnostic(e);
}
FatalError.raise()
}
@@ -102,7 +100,7 @@ pub fn parse_stream_from_source_str(
/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
- panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
+ panictry_buffer!(&sess.dcx, maybe_new_parser_from_source_str(sess, name, source))
}
/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
@@ -123,7 +121,7 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Spa
/// Given a session and a `source_file`, returns a parser.
fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
- panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file))
+ panictry_buffer!(&sess.dcx, maybe_source_file_to_parser(sess, source_file))
}
/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing the
@@ -167,8 +165,8 @@ fn try_file_to_source_file(
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> {
match try_file_to_source_file(sess, path, spanopt) {
Ok(source_file) => source_file,
- Err(mut d) => {
- sess.span_diagnostic.emit_diagnostic(&mut d);
+ Err(d) => {
+ sess.dcx.emit_diagnostic(d);
FatalError.raise();
}
}
@@ -180,7 +178,7 @@ pub fn source_file_to_stream(
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
) -> TokenStream {
- panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
+ panictry_buffer!(&sess.dcx, maybe_file_to_stream(sess, source_file, override_span))
}
/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@@ -191,7 +189,7 @@ pub fn maybe_file_to_stream(
override_span: Option<Span>,
) -> Result<TokenStream, Vec<Diagnostic>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
- sess.span_diagnostic.bug(format!(
+ sess.dcx.bug(format!(
"cannot lex `source_file` without source: {}",
sess.source_map().filename_for_diagnostics(&source_file.name)
));
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 104de47b9..56e52baf9 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -7,7 +7,6 @@ use rustc_ast::attr;
use rustc_ast::token::{self, Delimiter, Nonterminal};
use rustc_errors::{error_code, Diagnostic, IntoDiagnostic, PResult};
use rustc_span::{sym, BytePos, Span};
-use std::convert::TryInto;
use thin_vec::ThinVec;
use tracing::debug;
@@ -56,7 +55,7 @@ impl<'a> Parser<'a> {
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
if attr_style != ast::AttrStyle::Outer {
let span = self.token.span;
- let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
+ let mut err = self.dcx().struct_span_err_with_code(
span,
fluent::parse_inner_doc_comment_not_permitted,
error_code!(E0753),
@@ -249,7 +248,7 @@ impl<'a> Parser<'a> {
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
let item = match &self.token.kind {
- token::Interpolated(nt) => match &**nt {
+ token::Interpolated(nt) => match &nt.0 {
Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
_ => None,
},
@@ -369,7 +368,7 @@ impl<'a> Parser<'a> {
/// ```
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
let nt_meta = match &self.token.kind {
- token::Interpolated(nt) => match &**nt {
+ token::Interpolated(nt) => match &nt.0 {
token::NtMeta(e) => Some(e.clone()),
_ => None,
},
@@ -418,7 +417,7 @@ impl<'a> Parser<'a> {
}
Err(InvalidMetaItem { span: self.token.span, token: self.token.clone() }
- .into_diagnostic(&self.sess.span_diagnostic))
+ .into_diagnostic(self.dcx()))
}
}
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index c4e8d9006..2307f4cff 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,7 +1,7 @@
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream};
-use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
use rustc_ast::{self as ast};
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
use rustc_errors::PResult;
@@ -41,7 +41,7 @@ impl AttrWrapper {
}
pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec {
- sess.span_diagnostic.delay_span_bug(
+ sess.dcx.span_delayed_bug(
self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
"AttrVec is taken for recovery but no error is produced",
);
@@ -266,9 +266,7 @@ impl<'a> Parser<'a> {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&inner_attr.id) {
inner_attr_replace_ranges.push(attr_range);
} else {
- self.sess
- .span_diagnostic
- .delay_span_bug(inner_attr.span, "Missing token range for attribute");
+ self.dcx().span_delayed_bug(inner_attr.span, "Missing token range for attribute");
}
}
@@ -390,7 +388,7 @@ fn make_token_stream(
#[derive(Debug)]
struct FrameData {
// This is `None` for the first frame, `Some` for all others.
- open_delim_sp: Option<(Delimiter, Span)>,
+ open_delim_sp: Option<(Delimiter, Span, Spacing)>,
inner: Vec<AttrTokenTree>,
}
let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -398,21 +396,23 @@ fn make_token_stream(
while let Some((token, spacing)) = token_and_spacing {
match token {
FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
- stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
+ stack
+ .push(FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] });
}
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
let frame_data = stack
.pop()
.unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}"));
- let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
+ let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
assert_eq!(
open_delim, delim,
"Mismatched open/close delims: open={open_delim:?} close={span:?}"
);
let dspan = DelimSpan::from_pair(open_sp, span);
+ let dspacing = DelimSpacing::new(open_spacing, spacing);
let stream = AttrTokenStream::new(frame_data.inner);
- let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
+ let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
stack
.last_mut()
.unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}"))
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 2a8eb6edd..c077e0a83 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -10,31 +10,32 @@ use crate::errors::{
ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything, DocCommentOnParamType,
DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
- HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon,
- IncorrectUseOfAwait, ParenthesesInForHead, ParenthesesInForHeadSugg,
- PatternMethodParamWithoutBody, QuestionMarkInType, QuestionMarkInTypeSugg, SelfParamNotFirst,
- StructLiteralBodyWithoutPath, StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens,
- StructLiteralNeedingParensSugg, SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma,
- TernaryOperator, UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
+ HelpIdentifierStartsWithNumber, HelpUseLatestEdition, InInTypo, IncorrectAwait,
+ IncorrectSemicolon, IncorrectUseOfAwait, PatternMethodParamWithoutBody, QuestionMarkInType,
+ QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
+ StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens, StructLiteralNeedingParensSugg,
+ SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma, TernaryOperator,
+ UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType,
};
-
use crate::fluent_generated as fluent;
use crate::parser;
+use crate::parser::attr::InnerAttrPolicy;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
+use rustc_ast::tokenstream::AttrTokenTree;
use rustc_ast::util::parser::AssocOp;
use rustc_ast::{
AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingAnnotation, Block,
- BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
- Path, PathSegment, QSelf, Ty, TyKind,
+ BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
+ PatKind, Path, PathSegment, QSelf, Ty, TyKind,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{
- pluralize, AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage,
- ErrorGuaranteed, FatalError, Handler, IntoDiagnostic, MultiSpan, PResult,
+ pluralize, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
+ DiagnosticMessage, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan, PResult,
};
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
@@ -245,15 +246,15 @@ impl<'a> Parser<'a> {
sp: S,
m: impl Into<DiagnosticMessage>,
) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
- self.sess.span_diagnostic.struct_span_err(sp, m)
+ self.dcx().struct_span_err(sp, m)
}
- pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: impl Into<String>) -> ! {
- self.sess.span_diagnostic.span_bug(sp, m)
+ pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: impl Into<DiagnosticMessage>) -> ! {
+ self.dcx().span_bug(sp, msg)
}
- pub(super) fn diagnostic(&self) -> &'a Handler {
- &self.sess.span_diagnostic
+ pub(super) fn dcx(&self) -> &'a DiagCtxt {
+ &self.sess.dcx
}
/// Replace `self` with `snapshot.parser`.
@@ -283,7 +284,7 @@ impl<'a> Parser<'a> {
span: self.prev_token.span,
missing_comma: None,
}
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(self.dcx()));
}
let valid_follow = &[
@@ -346,7 +347,7 @@ impl<'a> Parser<'a> {
suggest_remove_comma,
help_cannot_start_number,
};
- let mut err = err.into_diagnostic(&self.sess.span_diagnostic);
+ let mut err = err.into_diagnostic(self.dcx());
// if the token we have is a `<`
// it *might* be a misplaced generic
@@ -506,7 +507,9 @@ impl<'a> Parser<'a> {
if expected.contains(&TokenType::Token(token::Semi)) {
// If the user is trying to write a ternary expression, recover it and
// return an Err to prevent a cascade of irrelevant diagnostics
- if self.prev_token == token::Question && let Err(e) = self.maybe_recover_from_ternary_operator() {
+ if self.prev_token == token::Question
+ && let Err(e) = self.maybe_recover_from_ternary_operator()
+ {
return Err(e);
}
@@ -637,6 +640,28 @@ impl<'a> Parser<'a> {
}
}
+ // Try to detect an intended c-string literal while using a pre-2021 edition. The heuristic
+ // here is to identify a cooked, uninterpolated `c` id immediately followed by a string, or
+ // a cooked, uninterpolated `cr` id immediately followed by a string or a `#`, in an edition
+ // where c-string literals are not allowed. There is the very slight possibility of a false
+ // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
+ // that in the parser requires unbounded lookahead, so we only add a hint to the existing
+ // error rather than replacing it entirely.
+ if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
+ && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
+ || (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
+ && matches!(
+ &self.token.kind,
+ TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
+ )))
+ && self.prev_token.span.hi() == self.token.span.lo()
+ && !self.token.span.at_least_rust_2021()
+ {
+ err.note("you may be trying to write a c-string literal");
+ err.note("c-string literals require Rust 2021 or later");
+ HelpUseLatestEdition::new().add_to_diagnostic(&mut err);
+ }
+
// `pub` may be used for an item or `pub(crate)`
if self.prev_token.is_ident_named(sym::public)
&& (self.token.can_begin_item()
@@ -670,15 +695,6 @@ impl<'a> Parser<'a> {
);
}
- // Add suggestion for a missing closing angle bracket if '>' is included in expected_tokens
- // there are unclosed angle brackets
- if self.unmatched_angle_bracket_count > 0
- && self.token.kind == TokenKind::Eq
- && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Gt)))
- {
- err.span_label(self.prev_token.span, "maybe try to close unmatched angle bracket");
- }
-
let sp = if self.token == token::Eof {
// This is EOF; don't want to point at the following char, but rather the last token.
self.prev_token.span
@@ -720,6 +736,95 @@ impl<'a> Parser<'a> {
Err(err)
}
+ pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) {
+ // Missing semicolon typo error.
+ let span = self.prev_token.span.shrink_to_hi();
+ let mut err = self.sess.create_err(ExpectedSemi {
+ span,
+ token: self.token.clone(),
+ unexpected_token_label: Some(self.token.span),
+ sugg: ExpectedSemiSugg::AddSemi(span),
+ });
+ let attr_span = match &expr.attrs[..] {
+ [] => unreachable!(),
+ [only] => only.span,
+ [first, rest @ ..] => {
+ for attr in rest {
+ err.span_label(attr.span, "");
+ }
+ first.span
+ }
+ };
+ err.span_label(
+ attr_span,
+ format!(
+ "only `;` terminated statements or tail expressions are allowed after {}",
+ if expr.attrs.len() == 1 { "this attribute" } else { "these attributes" },
+ ),
+ );
+ if self.token == token::Pound
+ && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Bracket))
+ {
+ // We have
+ // #[attr]
+ // expr
+ // #[not_attr]
+ // other_expr
+ err.span_label(span, "expected `;` here");
+ err.multipart_suggestion(
+ "alternatively, consider surrounding the expression with a block",
+ vec![
+ (expr.span.shrink_to_lo(), "{ ".to_string()),
+ (expr.span.shrink_to_hi(), " }".to_string()),
+ ],
+ Applicability::MachineApplicable,
+ );
+ let mut snapshot = self.create_snapshot_for_diagnostic();
+ if let [attr] = &expr.attrs[..]
+ && let ast::AttrKind::Normal(attr_kind) = &attr.kind
+ && let [segment] = &attr_kind.item.path.segments[..]
+ && segment.ident.name == sym::cfg
+ && let Some(args_span) = attr_kind.item.args.span()
+ && let Ok(next_attr) = snapshot.parse_attribute(InnerAttrPolicy::Forbidden(None))
+ && let ast::AttrKind::Normal(next_attr_kind) = next_attr.kind
+ && let Some(next_attr_args_span) = next_attr_kind.item.args.span()
+ && let [next_segment] = &next_attr_kind.item.path.segments[..]
+ && segment.ident.name == sym::cfg
+ && let Ok(next_expr) = snapshot.parse_expr()
+ {
+ // We have for sure
+ // #[cfg(..)]
+ // expr
+ // #[cfg(..)]
+ // other_expr
+ // So we suggest using `if cfg!(..) { expr } else if cfg!(..) { other_expr }`.
+ let margin = self.sess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
+ let sugg = vec![
+ (attr.span.with_hi(segment.span().hi()), "if cfg!".to_string()),
+ (args_span.shrink_to_hi().with_hi(attr.span.hi()), " {".to_string()),
+ (expr.span.shrink_to_lo(), " ".to_string()),
+ (
+ next_attr.span.with_hi(next_segment.span().hi()),
+ "} else if cfg!".to_string(),
+ ),
+ (
+ next_attr_args_span.shrink_to_hi().with_hi(next_attr.span.hi()),
+ " {".to_string(),
+ ),
+ (next_expr.span.shrink_to_lo(), " ".to_string()),
+ (next_expr.span.shrink_to_hi(), format!("\n{}}}", " ".repeat(margin))),
+ ];
+ err.multipart_suggestion(
+ "it seems like you are trying to provide different expressions depending on \
+ `cfg`, consider using `if cfg!(..)`",
+ sugg,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ err.emit();
+ }
+
fn check_too_many_raw_str_terminators(&mut self, err: &mut Diagnostic) -> bool {
let sm = self.sess.source_map();
match (&self.prev_token.kind, &self.token.kind) {
@@ -1182,11 +1287,11 @@ impl<'a> Parser<'a> {
(BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
let expr_to_str = |e: &Expr| {
self.span_to_snippet(e.span)
- .unwrap_or_else(|_| pprust::expr_to_string(&e))
+ .unwrap_or_else(|_| pprust::expr_to_string(e))
};
err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::SplitComparison {
span: inner_op.span.shrink_to_hi(),
- middle_term: expr_to_str(&r1),
+ middle_term: expr_to_str(r1),
});
false // Keep the current parse behavior, where the AST is `(x < y) < z`.
}
@@ -1327,7 +1432,7 @@ impl<'a> Parser<'a> {
// Not entirely sure now, but we bubble the error up with the
// suggestion.
self.restore_snapshot(snapshot);
- Err(err.into_diagnostic(&self.sess.span_diagnostic))
+ Err(err.into_diagnostic(self.dcx()))
}
}
} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
@@ -1342,7 +1447,7 @@ impl<'a> Parser<'a> {
}
// Consume the fn call arguments.
match self.consume_fn_args() {
- Err(()) => Err(err.into_diagnostic(&self.sess.span_diagnostic)),
+ Err(()) => Err(err.into_diagnostic(self.dcx())),
Ok(()) => {
self.sess.emit_err(err);
// FIXME: actually check that the two expressions in the binop are
@@ -1368,7 +1473,7 @@ impl<'a> Parser<'a> {
mk_err_expr(self, inner_op.span.to(self.prev_token.span))
} else {
// These cases cause too many knock-down errors, bail out (#61329).
- Err(err.into_diagnostic(&self.sess.span_diagnostic))
+ Err(err.into_diagnostic(self.dcx()))
}
};
}
@@ -1407,7 +1512,7 @@ impl<'a> Parser<'a> {
pub(super) fn maybe_report_ambiguous_plus(&mut self, impl_dyn_multi: bool, ty: &Ty) {
if impl_dyn_multi {
- self.sess.emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(&ty), span: ty.span });
+ self.sess.emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(ty), span: ty.span });
}
}
@@ -1840,7 +1945,7 @@ impl<'a> Parser<'a> {
self.sess.emit_err(IncorrectAwait {
span,
sugg_span: (span, applicability),
- expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr)),
+ expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(expr)),
question_mark: if is_question { "?" } else { "" },
});
@@ -1895,54 +2000,37 @@ impl<'a> Parser<'a> {
}
}
- /// Recovers a situation like `for ( $pat in $expr )`
- /// and suggest writing `for $pat in $expr` instead.
- ///
- /// This should be called before parsing the `$block`.
- pub(super) fn recover_parens_around_for_head(
+ /// When trying to close a generics list and encountering code like
+ /// ```text
+ /// impl<S: Into<std::borrow::Cow<'static, str>> From<S> for Canonical {}
+ /// // ^ missing > here
+ /// ```
+ /// we provide a structured suggestion on the error from `expect_gt`.
+ pub(super) fn expect_gt_or_maybe_suggest_closing_generics(
&mut self,
- pat: P<Pat>,
- begin_paren: Option<Span>,
- ) -> P<Pat> {
- match (&self.token.kind, begin_paren) {
- (token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
- self.bump();
-
- let sm = self.sess.source_map();
- let left = begin_par_sp;
- let right = self.prev_token.span;
- let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left)
- && !snip.ends_with(' ')
- {
- " ".to_string()
- } else {
- "".to_string()
- };
-
- let right_snippet = if let Ok(snip) = sm.span_to_next_source(right)
- && !snip.starts_with(' ')
- {
- " ".to_string()
- } else {
- "".to_string()
- };
-
- self.sess.emit_err(ParenthesesInForHead {
- span: vec![left, right],
- // With e.g. `for (x) in y)` this would replace `(x) in y)`
- // with `x) in y)` which is syntactically invalid.
- // However, this is prevented before we get here.
- sugg: ParenthesesInForHeadSugg { left, right, left_snippet, right_snippet },
- });
-
- // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
- pat.and_then(|pat| match pat.kind {
- PatKind::Paren(pat) => pat,
- _ => P(pat),
+ params: &[ast::GenericParam],
+ ) -> PResult<'a, ()> {
+ let Err(mut err) = self.expect_gt() else {
+ return Ok(());
+ };
+ // Attempt to find places where a missing `>` might belong.
+ if let [.., ast::GenericParam { bounds, .. }] = params
+ && let Some(poly) = bounds
+ .iter()
+ .filter_map(|bound| match bound {
+ ast::GenericBound::Trait(poly, _) => Some(poly),
+ _ => None,
})
- }
- _ => pat,
+ .last()
+ {
+ err.span_suggestion_verbose(
+ poly.span.shrink_to_hi(),
+ "you might have meant to end the type parameters here",
+ ">",
+ Applicability::MaybeIncorrect,
+ );
}
+ Err(err)
}
pub(super) fn recover_seq_parse_error(
@@ -2250,6 +2338,59 @@ impl<'a> Parser<'a> {
err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
}
err.span_label(span, "expected expression");
+
+ // Walk the chain of macro expansions for the current token to point at how the original
+ // code was interpreted. This helps the user realize when a macro argument of one type is
+ // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
+ // in a subsequent macro invocation (#71039).
+ let mut tok = self.token.clone();
+ let mut labels = vec![];
+ while let TokenKind::Interpolated(node) = &tok.kind {
+ let tokens = node.0.tokens();
+ labels.push(node.clone());
+ if let Some(tokens) = tokens
+ && let tokens = tokens.to_attr_token_stream()
+ && let tokens = tokens.0.deref()
+ && let [AttrTokenTree::Token(token, _)] = &tokens[..]
+ {
+ tok = token.clone();
+ } else {
+ break;
+ }
+ }
+ let mut iter = labels.into_iter().peekable();
+ let mut show_link = false;
+ while let Some(node) = iter.next() {
+ let descr = node.0.descr();
+ if let Some(next) = iter.peek() {
+ let next_descr = next.0.descr();
+ if next_descr != descr {
+ err.span_label(next.1, format!("this macro fragment matcher is {next_descr}"));
+ err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
+ err.span_label(
+ next.0.use_span(),
+ format!("this is expected to be {next_descr}"),
+ );
+ err.span_label(
+ node.0.use_span(),
+ format!(
+ "this is interpreted as {}, but it is expected to be {}",
+ next_descr, descr,
+ ),
+ );
+ show_link = true;
+ } else {
+ err.span_label(node.1, "");
+ }
+ }
+ }
+ if show_link {
+ err.note(
+ "when forwarding a matched fragment to another macro-by-example, matchers in the \
+ second macro will see an opaque AST of the fragment type, not the underlying \
+ tokens",
+ );
+ }
err
}
@@ -2420,8 +2561,7 @@ impl<'a> Parser<'a> {
Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
} else {
let after_kw_const = self.token.span;
- self.recover_const_arg(after_kw_const, err.into_diagnostic(&self.sess.span_diagnostic))
- .map(Some)
+ self.recover_const_arg(after_kw_const, err.into_diagnostic(self.dcx())).map(Some)
}
}
@@ -2721,7 +2861,6 @@ impl<'a> Parser<'a> {
pub(crate) fn maybe_recover_unexpected_comma(
&mut self,
lo: Span,
- is_mac_invoc: bool,
rt: CommaRecoveryMode,
) -> PResult<'a, ()> {
if self.token != token::Comma {
@@ -2742,28 +2881,24 @@ impl<'a> Parser<'a> {
let seq_span = lo.to(self.prev_token.span);
let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern");
if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
- if is_mac_invoc {
- err.note(fluent::parse_macro_expands_to_match_arm);
- } else {
- err.multipart_suggestion(
- format!(
- "try adding parentheses to match on a tuple{}",
- if let CommaRecoveryMode::LikelyTuple = rt { "" } else { "..." },
- ),
- vec![
- (seq_span.shrink_to_lo(), "(".to_string()),
- (seq_span.shrink_to_hi(), ")".to_string()),
- ],
+ err.multipart_suggestion(
+ format!(
+ "try adding parentheses to match on a tuple{}",
+ if let CommaRecoveryMode::LikelyTuple = rt { "" } else { "..." },
+ ),
+ vec![
+ (seq_span.shrink_to_lo(), "(".to_string()),
+ (seq_span.shrink_to_hi(), ")".to_string()),
+ ],
+ Applicability::MachineApplicable,
+ );
+ if let CommaRecoveryMode::EitherTupleOrPipe = rt {
+ err.span_suggestion(
+ seq_span,
+ "...or a vertical bar to match on multiple alternatives",
+ seq_snippet.replace(',', " |"),
Applicability::MachineApplicable,
);
- if let CommaRecoveryMode::EitherTupleOrPipe = rt {
- err.span_suggestion(
- seq_span,
- "...or a vertical bar to match on multiple alternatives",
- seq_snippet.replace(',', " |"),
- Applicability::MachineApplicable,
- );
- }
}
}
Err(err)
@@ -2784,7 +2919,7 @@ impl<'a> Parser<'a> {
span: path.span.shrink_to_hi(),
between: between_span,
}
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(self.dcx()));
}
}
}
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 235b28b6e..cd3e8b92f 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,3 +1,4 @@
+// ignore-tidy-filelength
use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
@@ -9,7 +10,7 @@ use super::{
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
-use ast::{GenBlockKind, Path, PathSegment};
+use ast::{CoroutineKind, GenBlockKind, Pat, Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
@@ -20,7 +21,7 @@ use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
use rustc_ast::visit::Visitor;
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
-use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
+use rustc_ast::{Arm, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::stack::ensure_sufficient_stack;
@@ -46,7 +47,7 @@ use thin_vec::{thin_vec, ThinVec};
macro_rules! maybe_whole_expr {
($p:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
- match &**nt {
+ match &nt.0 {
token::NtExpr(e) | token::NtLiteral(e) => {
let e = e.clone();
$p.bump();
@@ -1060,7 +1061,7 @@ impl<'a> Parser<'a> {
match &*components {
// 1e2
[IdentLike(i)] => {
- DestructuredFloat::Single(Symbol::intern(&i), span)
+ DestructuredFloat::Single(Symbol::intern(i), span)
}
// 1.
[IdentLike(i), Punct('.')] => {
@@ -1072,7 +1073,7 @@ impl<'a> Parser<'a> {
} else {
(span, span)
};
- let symbol = Symbol::intern(&i);
+ let symbol = Symbol::intern(i);
DestructuredFloat::TrailingDot(symbol, ident_span, dot_span)
}
// 1.2 | 1.2e3
@@ -1088,8 +1089,8 @@ impl<'a> Parser<'a> {
} else {
(span, span, span)
};
- let symbol1 = Symbol::intern(&i1);
- let symbol2 = Symbol::intern(&i2);
+ let symbol1 = Symbol::intern(i1);
+ let symbol2 = Symbol::intern(i2);
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span)
}
// 1e+ | 1e- (recovered)
@@ -1268,7 +1269,7 @@ impl<'a> Parser<'a> {
.collect(),
},
}
- .into_diagnostic(&self.sess.span_diagnostic);
+ .into_diagnostic(self.dcx());
replacement_err.emit();
let old_err = mem::replace(err, replacement_err);
@@ -1439,22 +1440,25 @@ impl<'a> Parser<'a> {
} else if this.eat_keyword(kw::Underscore) {
Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
} else if this.token.uninterpolated_span().at_least_rust_2018() {
- // `Span:.at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
- if this.check_keyword(kw::Async) {
- if this.is_gen_block(kw::Async) {
- // Check for `async {` and `async move {`.
+ // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
+ if this.token.uninterpolated_span().at_least_rust_2024()
+ // check for `gen {}` and `gen move {}`
+ // or `async gen {}` and `async gen move {}`
+ && (this.is_gen_block(kw::Gen, 0)
+ || (this.check_keyword(kw::Async) && this.is_gen_block(kw::Gen, 1)))
+ {
+ // FIXME: (async) gen closures aren't yet parsed.
+ this.parse_gen_block()
+ } else if this.check_keyword(kw::Async) {
+ // FIXME(gen_blocks): Parse `gen async` and suggest swap
+ if this.is_gen_block(kw::Async, 0) {
+ // Check for `async {` and `async move {`,
this.parse_gen_block()
} else {
this.parse_expr_closure()
}
- } else if this.eat_keyword(kw::Await) {
+ } else if this.eat_keyword_noexpect(kw::Await) {
this.recover_incorrect_await_syntax(lo, this.prev_token.span)
- } else if this.token.uninterpolated_span().at_least_rust_2024() {
- if this.is_gen_block(kw::Gen) {
- this.parse_gen_block()
- } else {
- this.parse_expr_lit()
- }
} else {
this.parse_expr_lit()
}
@@ -1689,8 +1693,7 @@ impl<'a> Parser<'a> {
mk_lit_char: impl FnOnce(Symbol, Span) -> L,
err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
) -> L {
- if let Some(mut diag) =
- self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
+ if let Some(mut diag) = self.dcx().steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
{
diag.span_suggestion_verbose(
lifetime.span.shrink_to_hi(),
@@ -1880,8 +1883,8 @@ impl<'a> Parser<'a> {
self.bump(); // `#`
let Some((ident, false)) = self.token.ident() else {
- let err = errors::ExpectedBuiltinIdent { span: self.token.span }
- .into_diagnostic(&self.sess.span_diagnostic);
+ let err =
+ errors::ExpectedBuiltinIdent { span: self.token.span }.into_diagnostic(self.dcx());
return Err(err);
};
self.sess.gated_spans.gate(sym::builtin_syntax, ident.span);
@@ -1892,7 +1895,7 @@ impl<'a> Parser<'a> {
Ok(res)
} else {
let err = errors::UnknownBuiltinConstruct { span: lo.to(ident.span), name: ident.name }
- .into_diagnostic(&self.sess.span_diagnostic);
+ .into_diagnostic(self.dcx());
return Err(err);
};
self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
@@ -1952,11 +1955,11 @@ impl<'a> Parser<'a> {
mk_lit_char: impl FnOnce(Symbol, Span) -> L,
) -> PResult<'a, L> {
if let token::Interpolated(nt) = &self.token.kind
- && let token::NtExpr(e) | token::NtLiteral(e) = &**nt
+ && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
&& matches!(e.kind, ExprKind::Err)
{
let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
- .into_diagnostic(&self.sess.span_diagnostic);
+ .into_diagnostic(self.dcx());
err.downgrade_to_delayed_bug();
return Err(err);
}
@@ -2052,7 +2055,7 @@ impl<'a> Parser<'a> {
Err(err) => {
let span = token.uninterpolated_span();
self.bump();
- report_lit_error(&self.sess, err, lit, span);
+ report_lit_error(self.sess, err, lit, span);
// Pack possible quotes and prefixes from the original literal into
// the error literal's symbol so they can be pretty-printed faithfully.
let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
@@ -2168,7 +2171,7 @@ impl<'a> Parser<'a> {
return Err(errors::MissingSemicolonBeforeArray {
open_delim: open_delim_span,
semicolon: prev_span.shrink_to_hi(),
- }.into_diagnostic(&self.sess.span_diagnostic));
+ }.into_diagnostic(self.dcx()));
}
Ok(_) => (),
Err(err) => err.cancel(),
@@ -2233,10 +2236,10 @@ impl<'a> Parser<'a> {
let movability =
if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
- let asyncness = if self.token.uninterpolated_span().at_least_rust_2018() {
- self.parse_asyncness(Case::Sensitive)
+ let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() {
+ self.parse_coroutine_kind(Case::Sensitive)
} else {
- Async::No
+ None
};
let capture_clause = self.parse_capture_clause()?;
@@ -2260,13 +2263,21 @@ impl<'a> Parser<'a> {
}
};
- if let Async::Yes { span, .. } = asyncness {
- // Feature-gate `async ||` closures.
- self.sess.gated_spans.gate(sym::async_closure, span);
+ match coroutine_kind {
+ Some(CoroutineKind::Async { span, .. }) => {
+ // Feature-gate `async ||` closures.
+ self.sess.gated_spans.gate(sym::async_closure, span);
+ }
+ Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
+ // Feature-gate `gen ||` and `async gen ||` closures.
+ // FIXME(gen_blocks): This perhaps should be a different gate.
+ self.sess.gated_spans.gate(sym::gen_blocks, span);
+ }
+ None => {}
}
if self.token.kind == TokenKind::Semi
- && matches!(self.token_cursor.stack.last(), Some((_, Delimiter::Parenthesis, _)))
+ && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
&& self.may_recover()
{
// It is likely that the closure body is a block but where the
@@ -2283,7 +2294,7 @@ impl<'a> Parser<'a> {
binder,
capture_clause,
constness,
- asyncness,
+ coroutine_kind,
movability,
fn_decl,
body,
@@ -2308,7 +2319,7 @@ impl<'a> Parser<'a> {
if self.check_keyword(kw::Async) {
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
- .into_diagnostic(&self.sess.span_diagnostic))
+ .into_diagnostic(self.dcx()))
} else {
Ok(CaptureBy::Value { move_kw: move_kw_span })
}
@@ -2477,7 +2488,7 @@ impl<'a> Parser<'a> {
let mut cond =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;
- CondChecker { parser: self, forbid_let_reason: None }.visit_expr(&mut cond);
+ CondChecker::new(self).visit_expr(&mut cond);
if let ExprKind::Let(_, _, _, None) = cond.kind {
// Remove the last feature gating of a `let` expression since it's stable.
@@ -2493,10 +2504,12 @@ impl<'a> Parser<'a> {
let err = errors::ExpectedExpressionFoundLet {
span: self.token.span,
reason: ForbiddenLetReason::OtherForbidden,
+ missing_let: None,
+ comparison: None,
};
if self.prev_token.kind == token::BinOp(token::Or) {
// This was part of a closure, the that part of the parser recover.
- return Err(err.into_diagnostic(&self.sess.span_diagnostic));
+ return Err(err.into_diagnostic(self.dcx()));
} else {
Some(self.sess.emit_err(err))
}
@@ -2606,30 +2619,72 @@ impl<'a> Parser<'a> {
}
}
- /// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
- fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
- // Record whether we are about to parse `for (`.
- // This is used below for recovery in case of `for ( $stuff ) $block`
- // in which case we will suggest `for $stuff $block`.
- let begin_paren = match self.token.kind {
- token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
- _ => None,
+ fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
+ let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+ // Record whether we are about to parse `for (`.
+ // This is used below for recovery in case of `for ( $stuff ) $block`
+ // in which case we will suggest `for $stuff $block`.
+ let start_span = self.token.span;
+ let left = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
+ Some((start_span, left))
+ } else {
+ None
+ };
+ // Try to parse the pattern `for ($PAT) in $EXPR`.
+ let pat = match (
+ self.parse_pat_allow_top_alt(
+ None,
+ RecoverComma::Yes,
+ RecoverColon::Yes,
+ CommaRecoveryMode::LikelyTuple,
+ ),
+ begin_paren,
+ ) {
+ (Ok(pat), _) => pat, // Happy path.
+ (Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => {
+ // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
+ // happen right before the return of this method.
+ let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None) {
+ Ok(expr) => expr,
+ Err(expr_err) => {
+ // We don't know what followed the `in`, so cancel and bubble up the
+ // original error.
+ expr_err.cancel();
+ return Err(err);
+ }
+ };
+ return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) {
+ // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
+ // parser state and emit a targeted suggestion.
+ let span = vec![start_span, self.token.span];
+ let right = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
+ self.bump(); // )
+ err.cancel();
+ self.sess.emit_err(errors::ParenthesesInForHead {
+ span,
+ // With e.g. `for (x) in y)` this would replace `(x) in y)`
+ // with `x) in y)` which is syntactically invalid.
+ // However, this is prevented before we get here.
+ sugg: errors::ParenthesesInForHeadSugg { left, right },
+ });
+ Ok((self.mk_pat(start_span.to(right), ast::PatKind::Wild), expr))
+ } else {
+ Err(err) // Some other error, bubble up.
+ };
+ }
+ (Err(err), _) => return Err(err), // Some other error, bubble up.
};
-
- let pat = self.parse_pat_allow_top_alt(
- None,
- RecoverComma::Yes,
- RecoverColon::Yes,
- CommaRecoveryMode::LikelyTuple,
- )?;
if !self.eat_keyword(kw::In) {
self.error_missing_in_for_loop();
}
self.check_for_for_in_in_typo(self.prev_token.span);
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+ Ok((pat, expr))
+ }
- let pat = self.recover_parens_around_for_head(pat, begin_paren);
-
+ /// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
+ fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+ let (pat, expr) = self.parse_for_head()?;
// Recover from missing expression in `for` loop
if matches!(expr.kind, ExprKind::Block(..))
&& !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
@@ -2850,167 +2905,167 @@ impl<'a> Parser<'a> {
}
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
- // Used to check the `let_chains` and `if_let_guard` features mostly by scanning
- // `&&` tokens.
- fn check_let_expr(expr: &Expr) -> (bool, bool) {
- match &expr.kind {
- ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
- let lhs_rslt = check_let_expr(lhs);
- let rhs_rslt = check_let_expr(rhs);
- (lhs_rslt.0 || rhs_rslt.0, false)
- }
- ExprKind::Let(..) => (true, true),
- _ => (false, true),
- }
- }
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
- let pat = this.parse_pat_allow_top_alt(
- None,
- RecoverComma::Yes,
- RecoverColon::Yes,
- CommaRecoveryMode::EitherTupleOrPipe,
- )?;
- let guard = if this.eat_keyword(kw::If) {
- let if_span = this.prev_token.span;
- let mut cond = this.parse_match_guard_condition()?;
-
- CondChecker { parser: this, forbid_let_reason: None }.visit_expr(&mut cond);
-
- let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
- if has_let_expr {
- if does_not_have_bin_op {
- // Remove the last feature gating of a `let` expression since it's stable.
- this.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
- }
- let span = if_span.to(cond.span);
- this.sess.gated_spans.gate(sym::if_let_guard, span);
- }
- Some(cond)
+ let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
+
+ let span_before_body = this.prev_token.span;
+ let arm_body;
+ let is_fat_arrow = this.check(&token::FatArrow);
+ let is_almost_fat_arrow = TokenKind::FatArrow
+ .similar_tokens()
+ .is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind));
+ let mut result = if !is_fat_arrow && !is_almost_fat_arrow {
+ // A pattern without a body, allowed for never patterns.
+ arm_body = None;
+ this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]).map(
+ |x| {
+ // Don't gate twice
+ if !pat.contains_never_pattern() {
+ this.sess.gated_spans.gate(sym::never_patterns, pat.span);
+ }
+ x
+ },
+ )
} else {
- None
- };
- let arrow_span = this.token.span;
- if let Err(mut err) = this.expect(&token::FatArrow) {
- // We might have a `=>` -> `=` or `->` typo (issue #89396).
- if TokenKind::FatArrow
- .similar_tokens()
- .is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind))
- {
- err.span_suggestion(
- this.token.span,
- "use a fat arrow to start a match arm",
- "=>",
- Applicability::MachineApplicable,
- );
- err.emit();
- this.bump();
- } else if matches!(
- (&this.prev_token.kind, &this.token.kind),
- (token::DotDotEq, token::Gt)
- ) {
- // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
- // so we suppress the error here
- err.delay_as_bug();
- this.bump();
- } else {
- return Err(err);
+ if let Err(mut err) = this.expect(&token::FatArrow) {
+ // We might have a `=>` -> `=` or `->` typo (issue #89396).
+ if is_almost_fat_arrow {
+ err.span_suggestion(
+ this.token.span,
+ "use a fat arrow to start a match arm",
+ "=>",
+ Applicability::MachineApplicable,
+ );
+ if matches!(
+ (&this.prev_token.kind, &this.token.kind),
+ (token::DotDotEq, token::Gt)
+ ) {
+ // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
+ // so we suppress the error here
+ err.delay_as_bug();
+ } else {
+ err.emit();
+ }
+ this.bump();
+ } else {
+ return Err(err);
+ }
}
- }
- let arm_start_span = this.token.span;
+ let arrow_span = this.prev_token.span;
+ let arm_start_span = this.token.span;
- let expr = this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
- err.span_label(arrow_span, "while parsing the `match` arm starting here");
- err
- })?;
+ let expr =
+ this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
+ err.span_label(arrow_span, "while parsing the `match` arm starting here");
+ err
+ })?;
- let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
- && this.token != token::CloseDelim(Delimiter::Brace);
-
- let hi = this.prev_token.span;
-
- if require_comma {
- let sm = this.sess.source_map();
- if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
- let span = body.span;
- return Ok((
- ast::Arm {
- attrs,
- pat,
- guard,
- body,
- span,
- id: DUMMY_NODE_ID,
- is_placeholder: false,
- },
- TrailingToken::None,
- ));
- }
- this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
- .or_else(|mut err| {
- if this.token == token::FatArrow {
- if let Ok(expr_lines) = sm.span_to_lines(expr.span)
- && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
- && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
- && expr_lines.lines.len() == 2
- {
- // We check whether there's any trailing code in the parse span,
- // if there isn't, we very likely have the following:
- //
- // X | &Y => "y"
- // | -- - missing comma
- // | |
- // | arrow_span
- // X | &X => "x"
- // | - ^^ self.token.span
- // | |
- // | parsed until here as `"y" & X`
- err.span_suggestion_short(
- arm_start_span.shrink_to_hi(),
- "missing a comma here to end this `match` arm",
- ",",
- Applicability::MachineApplicable,
+ let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
+ && this.token != token::CloseDelim(Delimiter::Brace);
+
+ if !require_comma {
+ arm_body = Some(expr);
+ this.eat(&token::Comma);
+ Ok(false)
+ } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
+ arm_body = Some(body);
+ Ok(true)
+ } else {
+ let expr_span = expr.span;
+ arm_body = Some(expr);
+ this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
+ .map_err(|mut err| {
+ if this.token == token::FatArrow {
+ let sm = this.sess.source_map();
+ if let Ok(expr_lines) = sm.span_to_lines(expr_span)
+ && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
+ && arm_start_lines.lines[0].end_col
+ == expr_lines.lines[0].end_col
+ && expr_lines.lines.len() == 2
+ {
+ // We check whether there's any trailing code in the parse span,
+ // if there isn't, we very likely have the following:
+ //
+ // X | &Y => "y"
+ // | -- - missing comma
+ // | |
+ // | arrow_span
+ // X | &X => "x"
+ // | - ^^ self.token.span
+ // | |
+ // | parsed until here as `"y" & X`
+ err.span_suggestion_short(
+ arm_start_span.shrink_to_hi(),
+ "missing a comma here to end this `match` arm",
+ ",",
+ Applicability::MachineApplicable,
+ );
+ }
+ } else {
+ err.span_label(
+ arrow_span,
+ "while parsing the `match` arm starting here",
);
- return Err(err);
- }
- } else {
- // FIXME(compiler-errors): We could also recover `; PAT =>` here
-
- // Try to parse a following `PAT =>`, if successful
- // then we should recover.
- let mut snapshot = this.create_snapshot_for_diagnostic();
- let pattern_follows = snapshot
- .parse_pat_allow_top_alt(
- None,
- RecoverComma::Yes,
- RecoverColon::Yes,
- CommaRecoveryMode::EitherTupleOrPipe,
- )
- .map_err(|err| err.cancel())
- .is_ok();
- if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
- err.cancel();
- this.sess.emit_err(errors::MissingCommaAfterMatchArm {
- span: hi.shrink_to_hi(),
- });
- return Ok(true);
}
- }
- err.span_label(arrow_span, "while parsing the `match` arm starting here");
- Err(err)
- })?;
- } else {
- this.eat(&token::Comma);
+ err
+ })
+ }
+ };
+
+ let hi_span = arm_body.as_ref().map_or(span_before_body, |body| body.span);
+ let arm_span = lo.to(hi_span);
+
+ // We want to recover:
+ // X | Some(_) => foo()
+ // | - missing comma
+ // X | None => "x"
+ // | ^^^^ self.token.span
+ // as well as:
+ // X | Some(!)
+ // | - missing comma
+ // X | None => "x"
+ // | ^^^^ self.token.span
+ // But we mustn't recover
+ // X | pat[0] => {}
+ // | ^ self.token.span
+ let recover_missing_comma = arm_body.is_some() || pat.could_be_never_pattern();
+ if recover_missing_comma {
+ result = result.or_else(|err| {
+ // FIXME(compiler-errors): We could also recover `; PAT =>` here
+
+ // Try to parse a following `PAT =>`, if successful
+ // then we should recover.
+ let mut snapshot = this.create_snapshot_for_diagnostic();
+ let pattern_follows = snapshot
+ .parse_pat_allow_top_alt(
+ None,
+ RecoverComma::Yes,
+ RecoverColon::Yes,
+ CommaRecoveryMode::EitherTupleOrPipe,
+ )
+ .map_err(|err| err.cancel())
+ .is_ok();
+ if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
+ err.cancel();
+ this.sess.emit_err(errors::MissingCommaAfterMatchArm {
+ span: arm_span.shrink_to_hi(),
+ });
+ return Ok(true);
+ }
+ Err(err)
+ });
}
+ result?;
Ok((
ast::Arm {
attrs,
pat,
guard,
- body: expr,
- span: lo.to(hi),
+ body: arm_body,
+ span: arm_span,
id: DUMMY_NODE_ID,
is_placeholder: false,
},
@@ -3019,6 +3074,90 @@ impl<'a> Parser<'a> {
})
}
+ fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<P<Expr>>> {
+ // Used to check the `let_chains` and `if_let_guard` features mostly by scanning
+ // `&&` tokens.
+ fn check_let_expr(expr: &Expr) -> (bool, bool) {
+ match &expr.kind {
+ ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
+ let lhs_rslt = check_let_expr(lhs);
+ let rhs_rslt = check_let_expr(rhs);
+ (lhs_rslt.0 || rhs_rslt.0, false)
+ }
+ ExprKind::Let(..) => (true, true),
+ _ => (false, true),
+ }
+ }
+ if !self.eat_keyword(kw::If) {
+ // No match arm guard present.
+ return Ok(None);
+ }
+
+ let if_span = self.prev_token.span;
+ let mut cond = self.parse_match_guard_condition()?;
+
+ CondChecker::new(self).visit_expr(&mut cond);
+
+ let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
+ if has_let_expr {
+ if does_not_have_bin_op {
+ // Remove the last feature gating of a `let` expression since it's stable.
+ self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
+ }
+ let span = if_span.to(cond.span);
+ self.sess.gated_spans.gate(sym::if_let_guard, span);
+ }
+ Ok(Some(cond))
+ }
+
+ fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
+ if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+ // Detect and recover from `($pat if $cond) => $arm`.
+ let left = self.token.span;
+ match self.parse_pat_allow_top_alt(
+ None,
+ RecoverComma::Yes,
+ RecoverColon::Yes,
+ CommaRecoveryMode::EitherTupleOrPipe,
+ ) {
+ Ok(pat) => Ok((pat, self.parse_match_arm_guard()?)),
+ Err(err)
+ if let prev_sp = self.prev_token.span
+ && let true = self.eat_keyword(kw::If) =>
+ {
+ // We know for certain we've found `($pat if` so far.
+ let mut cond = match self.parse_match_guard_condition() {
+ Ok(cond) => cond,
+ Err(cond_err) => {
+ cond_err.cancel();
+ return Err(err);
+ }
+ };
+ err.cancel();
+ CondChecker::new(self).visit_expr(&mut cond);
+ self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
+ self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+ let right = self.prev_token.span;
+ self.sess.emit_err(errors::ParenthesesInMatchPat {
+ span: vec![left, right],
+ sugg: errors::ParenthesesInMatchPatSugg { left, right },
+ });
+ Ok((self.mk_pat(left.to(prev_sp), ast::PatKind::Wild), Some(cond)))
+ }
+ Err(err) => Err(err),
+ }
+ } else {
+ // Regular parser flow:
+ let pat = self.parse_pat_allow_top_alt(
+ None,
+ RecoverComma::Yes,
+ RecoverColon::Yes,
+ CommaRecoveryMode::EitherTupleOrPipe,
+ )?;
+ Ok((pat, self.parse_match_arm_guard()?))
+ }
+ }
+
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err(
|mut err| {
@@ -3054,8 +3193,7 @@ impl<'a> Parser<'a> {
fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
let (attrs, body) = self.parse_inner_attrs_and_block()?;
if self.eat_keyword(kw::Catch) {
- Err(errors::CatchAfterTry { span: self.prev_token.span }
- .into_diagnostic(&self.sess.span_diagnostic))
+ Err(errors::CatchAfterTry { span: self.prev_token.span }.into_diagnostic(self.dcx()))
} else {
let span = span_lo.to(body.span);
self.sess.gated_spans.gate(sym::try_blocks, span);
@@ -3086,34 +3224,45 @@ impl<'a> Parser<'a> {
fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
let kind = if self.eat_keyword(kw::Async) {
- GenBlockKind::Async
+ if self.eat_keyword(kw::Gen) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
} else {
assert!(self.eat_keyword(kw::Gen));
- self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.token.span));
GenBlockKind::Gen
};
+ match kind {
+ GenBlockKind::Async => {
+ // `async` blocks are stable
+ }
+ GenBlockKind::Gen | GenBlockKind::AsyncGen => {
+ self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.prev_token.span));
+ }
+ }
let capture_clause = self.parse_capture_clause()?;
let (attrs, body) = self.parse_inner_attrs_and_block()?;
let kind = ExprKind::Gen(capture_clause, body, kind);
Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
}
- fn is_gen_block(&self, kw: Symbol) -> bool {
- self.token.is_keyword(kw)
+ fn is_gen_block(&self, kw: Symbol, lookahead: usize) -> bool {
+ self.is_keyword_ahead(lookahead, &[kw])
&& ((
// `async move {`
- self.is_keyword_ahead(1, &[kw::Move])
- && self.look_ahead(2, |t| {
+ self.is_keyword_ahead(lookahead + 1, &[kw::Move])
+ && self.look_ahead(lookahead + 2, |t| {
*t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
})
) || (
// `async {`
- self.look_ahead(1, |t| {
+ self.look_ahead(lookahead + 1, |t| {
*t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
})
))
}
+ pub(super) fn is_async_gen_block(&self) -> bool {
+ self.token.is_keyword(kw::Async) && self.is_gen_block(kw::Gen, 1)
+ }
+
fn is_certainly_not_a_block(&self) -> bool {
self.look_ahead(1, |t| t.is_ident())
&& (
@@ -3206,7 +3355,7 @@ impl<'a> Parser<'a> {
if let Some((ident, _)) = self.token.ident()
&& !self.token.is_reserved_ident()
&& self.look_ahead(1, |t| {
- AssocOp::from_token(&t).is_some()
+ AssocOp::from_token(t).is_some()
|| matches!(t.kind, token::OpenDelim(_))
|| t.kind == token::Dot
})
@@ -3386,7 +3535,7 @@ impl<'a> Parser<'a> {
ident_span: this.token.span,
token: this.look_ahead(1, |t| t.clone()),
}
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(&self.sess.dcx));
}
let (ident, expr) = if is_shorthand {
// Mimic `x: x` for the `x` field shorthand.
@@ -3551,6 +3700,14 @@ pub(crate) enum ForbiddenLetReason {
struct CondChecker<'a> {
parser: &'a Parser<'a>,
forbid_let_reason: Option<ForbiddenLetReason>,
+ missing_let: Option<errors::MaybeMissingLet>,
+ comparison: Option<errors::MaybeComparison>,
+}
+
+impl<'a> CondChecker<'a> {
+ fn new(parser: &'a Parser<'a>) -> Self {
+ CondChecker { parser, forbid_let_reason: None, missing_let: None, comparison: None }
+ }
}
impl MutVisitor for CondChecker<'_> {
@@ -3561,11 +3718,13 @@ impl MutVisitor for CondChecker<'_> {
match e.kind {
ExprKind::Let(_, _, _, ref mut is_recovered @ None) => {
if let Some(reason) = self.forbid_let_reason {
- *is_recovered = Some(
- self.parser
- .sess
- .emit_err(errors::ExpectedExpressionFoundLet { span, reason }),
- );
+ *is_recovered =
+ Some(self.parser.sess.emit_err(errors::ExpectedExpressionFoundLet {
+ span,
+ reason,
+ missing_let: self.missing_let,
+ comparison: self.comparison,
+ }));
} else {
self.parser.sess.gated_spans.gate(sym::let_chains, span);
}
@@ -3589,9 +3748,28 @@ impl MutVisitor for CondChecker<'_> {
noop_visit_expr(e, self);
self.forbid_let_reason = forbid_let_reason;
}
+ ExprKind::Assign(ref lhs, _, span) => {
+ let forbid_let_reason = self.forbid_let_reason;
+ self.forbid_let_reason = Some(OtherForbidden);
+ let missing_let = self.missing_let;
+ if let ExprKind::Binary(_, _, rhs) = &lhs.kind
+ && let ExprKind::Path(_, _)
+ | ExprKind::Struct(_)
+ | ExprKind::Call(_, _)
+ | ExprKind::Array(_) = rhs.kind
+ {
+ self.missing_let =
+ Some(errors::MaybeMissingLet { span: rhs.span.shrink_to_lo() });
+ }
+ let comparison = self.comparison;
+ self.comparison = Some(errors::MaybeComparison { span: span.shrink_to_hi() });
+ noop_visit_expr(e, self);
+ self.forbid_let_reason = forbid_let_reason;
+ self.missing_let = missing_let;
+ self.comparison = comparison;
+ }
ExprKind::Unary(_, _)
| ExprKind::Await(_, _)
- | ExprKind::Assign(_, _, _)
| ExprKind::AssignOp(_, _, _)
| ExprKind::Range(_, _, _)
| ExprKind::Try(_)
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 242c9d332..20f67b284 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -279,7 +279,7 @@ impl<'a> Parser<'a> {
let span_lo = self.token.span;
let (params, span) = if self.eat_lt() {
let params = self.parse_generic_params()?;
- self.expect_gt()?;
+ self.expect_gt_or_maybe_suggest_closing_generics(&params)?;
(params, span_lo.to(self.prev_token.span))
} else {
(ThinVec::new(), self.prev_token.span.shrink_to_hi())
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 801860c21..09ee042ef 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -3,18 +3,12 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
-use ast::StaticItem;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::util::case::Case;
-use rustc_ast::MacCall;
-use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
-use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
-use rustc_ast::{BindingAnnotation, Block, FnDecl, FnSig, Param, SelfKind};
-use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData};
-use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
+use rustc_ast::{self as ast};
use rustc_ast_pretty::pprust;
use rustc_errors::{
struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
@@ -123,7 +117,7 @@ impl<'a> Parser<'a> {
// Don't use `maybe_whole` so that we have precise control
// over when we bump the parser
if let token::Interpolated(nt) = &self.token.kind
- && let token::NtItem(item) = &**nt
+ && let token::NtItem(item) = &nt.0
{
let mut item = item.clone();
self.bump();
@@ -444,11 +438,7 @@ impl<'a> Parser<'a> {
None
};
- if let Some(err) = err {
- Err(err.into_diagnostic(&self.sess.span_diagnostic))
- } else {
- Ok(())
- }
+ if let Some(err) = err { Err(err.into_diagnostic(self.dcx())) } else { Ok(()) }
}
fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
@@ -769,7 +759,7 @@ impl<'a> Parser<'a> {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
// FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
struct_span_err!(
- self.diagnostic(),
+ self.dcx(),
self.token.span,
E0584,
"found a documentation comment that doesn't document anything",
@@ -933,7 +923,7 @@ impl<'a> Parser<'a> {
);
let where_predicates_split = before_where_clause.predicates.len();
let mut predicates = before_where_clause.predicates;
- predicates.extend(after_where_clause.predicates.into_iter());
+ predicates.extend(after_where_clause.predicates);
let where_clause = WhereClause {
has_where_token: before_where_clause.has_where_token
|| after_where_clause.has_where_token,
@@ -1143,9 +1133,11 @@ impl<'a> Parser<'a> {
Ok(kind) => kind,
Err(kind) => match kind {
ItemKind::Const(box ConstItem { ty, expr, .. }) => {
+ let const_span = Some(span.with_hi(ident.span.lo()))
+ .filter(|span| span.can_be_used_for_suggestions());
self.sess.emit_err(errors::ExternItemCannotBeConst {
ident_span: ident.span,
- const_span: span.with_hi(ident.span.lo()),
+ const_span,
});
ForeignItemKind::Static(ty, Mutability::Not, expr)
}
@@ -1382,8 +1374,7 @@ impl<'a> Parser<'a> {
let span = self.prev_token.span.shrink_to_hi();
let err: DiagnosticBuilder<'_, ErrorGuaranteed> =
- errors::MissingConstType { span, colon, kind }
- .into_diagnostic(&self.sess.span_diagnostic);
+ errors::MissingConstType { span, colon, kind }.into_diagnostic(self.dcx());
err.stash(span, StashKey::ItemNoType);
// The user intended that the type be inferred,
@@ -1400,7 +1391,7 @@ impl<'a> Parser<'a> {
self.bump();
self.sess.emit_err(err);
} else {
- return Err(err.into_diagnostic(&self.sess.span_diagnostic));
+ return Err(err.into_diagnostic(self.dcx()));
}
}
@@ -1415,8 +1406,8 @@ impl<'a> Parser<'a> {
self.bump();
(thin_vec![], false)
} else {
- self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant()).map_err(
- |mut err| {
+ self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
+ .map_err(|mut err| {
err.span_label(id.span, "while parsing this enum");
if self.token == token::Colon {
let snapshot = self.create_snapshot_for_diagnostic();
@@ -1436,20 +1427,22 @@ impl<'a> Parser<'a> {
}
self.restore_snapshot(snapshot);
}
- self.recover_stmt();
+ self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+ self.bump(); // }
err
- },
- )?
+ })?
};
let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
Ok((id, ItemKind::Enum(enum_definition, generics)))
}
- fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
+ fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
self.recover_diff_marker();
let variant_attrs = self.parse_outer_attributes()?;
self.recover_diff_marker();
+ let help = "enum variants can be `Variant`, `Variant = <integer>`, \
+ `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
self.collect_tokens_trailing_token(
variant_attrs,
ForceCollect::No,
@@ -1476,10 +1469,39 @@ impl<'a> Parser<'a> {
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
- this.parse_record_struct_body("struct", ident.span, false)?;
- VariantData::Struct(fields, recovered)
+ match this.parse_record_struct_body("struct", ident.span, false) {
+ Ok((fields, recovered)) => (fields, recovered),
+ Err(mut err) => {
+ if this.token == token::Colon {
+ // We handle `enum` to `struct` suggestion in the caller.
+ return Err(err);
+ }
+ this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+ this.bump(); // }
+ err.span_label(span, "while parsing this enum");
+ err.help(help);
+ err.emit();
+ (thin_vec![], true)
+ }
+ };
+ VariantData::Struct { fields, recovered }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
- VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
+ let body = match this.parse_tuple_struct_body() {
+ Ok(body) => body,
+ Err(mut err) => {
+ if this.token == token::Colon {
+ // We handle `enum` to `struct` suggestion in the caller.
+ return Err(err);
+ }
+ this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
+ this.bump(); // )
+ err.span_label(span, "while parsing this enum");
+ err.help(help);
+ err.emit();
+ thin_vec![]
+ }
+ };
+ VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
};
@@ -1500,8 +1522,9 @@ impl<'a> Parser<'a> {
Ok((Some(vr), TrailingToken::MaybeComma))
},
- ).map_err(|mut err| {
- err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`");
+ )
+ .map_err(|mut err| {
+ err.help(help);
err
})
}
@@ -1546,7 +1569,7 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
- VariantData::Struct(fields, recovered)
+ VariantData::Struct { fields, recovered }
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
@@ -1558,7 +1581,7 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
- VariantData::Struct(fields, recovered)
+ VariantData::Struct { fields, recovered }
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
@@ -1568,7 +1591,7 @@ impl<'a> Parser<'a> {
} else {
let err =
errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
- return Err(err.into_diagnostic(&self.sess.span_diagnostic));
+ return Err(err.into_diagnostic(self.dcx()));
};
Ok((class_name, ItemKind::Struct(vdata, generics)))
@@ -1587,14 +1610,14 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
- VariantData::Struct(fields, recovered)
+ VariantData::Struct { fields, recovered }
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = self.parse_record_struct_body(
"union",
class_name.span,
generics.where_clause.has_where_token,
)?;
- VariantData::Struct(fields, recovered)
+ VariantData::Struct { fields, recovered }
} else {
let token_str = super::token_descr(&self.token);
let msg = format!("expected `where` or `{{` after union name, found {token_str}");
@@ -1764,7 +1787,7 @@ impl<'a> Parser<'a> {
let sp = previous_span.shrink_to_hi();
err.missing_comma = Some(sp);
}
- return Err(err.into_diagnostic(&self.sess.span_diagnostic));
+ return Err(err.into_diagnostic(self.dcx()));
}
}
_ => {
@@ -1814,7 +1837,7 @@ impl<'a> Parser<'a> {
// Make sure an error was emitted (either by recovering an angle bracket,
// or by finding an identifier as the next token), since we're
// going to continue parsing
- assert!(self.sess.span_diagnostic.has_errors().is_some());
+ assert!(self.dcx().has_errors().is_some());
} else {
return Err(err);
}
@@ -2271,7 +2294,7 @@ impl<'a> Parser<'a> {
} else {
&[token::Semi, token::OpenDelim(Delimiter::Brace)]
};
- if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) {
+ if let Err(mut err) = self.expected_one_of_not_found(&[], expected) {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
// the AST for typechecking.
@@ -2330,8 +2353,10 @@ impl<'a> Parser<'a> {
|| case == Case::Insensitive
&& t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
)
- // Rule out unsafe extern block.
- && !self.is_unsafe_foreign_mod())
+ // Rule out `unsafe extern {`.
+ && !self.is_unsafe_foreign_mod()
+ // Rule out `async gen {` and `async gen move {`
+ && !self.is_async_gen_block())
})
// `extern ABI fn`
|| self.check_keyword_case(kw::Extern, case)
@@ -2363,10 +2388,7 @@ impl<'a> Parser<'a> {
let constness = self.parse_constness(case);
let async_start_sp = self.token.span;
- let asyncness = self.parse_asyncness(case);
-
- let _gen_start_sp = self.token.span;
- let genness = self.parse_genness(case);
+ let coroutine_kind = self.parse_coroutine_kind(case);
let unsafe_start_sp = self.token.span;
let unsafety = self.parse_unsafety(case);
@@ -2374,7 +2396,7 @@ impl<'a> Parser<'a> {
let ext_start_sp = self.token.span;
let ext = self.parse_extern(case);
- if let Async::Yes { span, .. } = asyncness {
+ if let Some(CoroutineKind::Async { span, .. }) = coroutine_kind {
if span.is_rust_2015() {
self.sess.emit_err(errors::AsyncFnIn2015 {
span,
@@ -2383,8 +2405,11 @@ impl<'a> Parser<'a> {
}
}
- if let Gen::Yes { span, .. } = genness {
- self.sess.emit_err(errors::GenFn { span });
+ match coroutine_kind {
+ Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
+ self.sess.gated_spans.gate(sym::gen_blocks, span);
+ }
+ Some(CoroutineKind::Async { .. }) | None => {}
}
if !self.eat_keyword_case(kw::Fn, case) {
@@ -2403,7 +2428,7 @@ impl<'a> Parser<'a> {
// We may be able to recover
let mut recover_constness = constness;
- let mut recover_asyncness = asyncness;
+ let mut recover_coroutine_kind = coroutine_kind;
let mut recover_unsafety = unsafety;
// This will allow the machine fix to directly place the keyword in the correct place or to indicate
// that the keyword is already present and the second instance should be removed.
@@ -2416,14 +2441,28 @@ impl<'a> Parser<'a> {
}
}
} else if self.check_keyword(kw::Async) {
- match asyncness {
- Async::Yes { span, .. } => Some(WrongKw::Duplicated(span)),
- Async::No => {
- recover_asyncness = Async::Yes {
+ match coroutine_kind {
+ Some(CoroutineKind::Async { span, .. }) => {
+ Some(WrongKw::Duplicated(span))
+ }
+ Some(CoroutineKind::AsyncGen { span, .. }) => {
+ Some(WrongKw::Duplicated(span))
+ }
+ Some(CoroutineKind::Gen { .. }) => {
+ recover_coroutine_kind = Some(CoroutineKind::AsyncGen {
+ span: self.token.span,
+ closure_id: DUMMY_NODE_ID,
+ return_impl_trait_id: DUMMY_NODE_ID,
+ });
+ // FIXME(gen_blocks): This span is wrong, didn't want to think about it.
+ Some(WrongKw::Misplaced(unsafe_start_sp))
+ }
+ None => {
+ recover_coroutine_kind = Some(CoroutineKind::Async {
span: self.token.span,
closure_id: DUMMY_NODE_ID,
return_impl_trait_id: DUMMY_NODE_ID,
- };
+ });
Some(WrongKw::Misplaced(unsafe_start_sp))
}
}
@@ -2504,6 +2543,8 @@ impl<'a> Parser<'a> {
}
}
+ // FIXME(gen_blocks): add keyword recovery logic for genness
+
if wrong_kw.is_some()
&& self.may_recover()
&& self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case))
@@ -2515,7 +2556,7 @@ impl<'a> Parser<'a> {
return Ok(FnHeader {
constness: recover_constness,
unsafety: recover_unsafety,
- asyncness: recover_asyncness,
+ coroutine_kind: recover_coroutine_kind,
ext,
});
}
@@ -2525,7 +2566,7 @@ impl<'a> Parser<'a> {
}
}
- Ok(FnHeader { constness, unsafety, asyncness, ext })
+ Ok(FnHeader { constness, unsafety, coroutine_kind, ext })
}
/// Parses the parameter list and result type of a function declaration.
@@ -2750,7 +2791,7 @@ impl<'a> Parser<'a> {
fn is_named_param(&self) -> bool {
let offset = match &self.token.kind {
- token::Interpolated(nt) => match **nt {
+ token::Interpolated(nt) => match &nt.0 {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
},
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 1a7ae4069..b91432f10 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -11,7 +11,6 @@ mod stmt;
mod ty;
use crate::lexer::UnmatchedDelim;
-use ast::Gen;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
@@ -21,13 +20,14 @@ pub use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
use rustc_ast::util::case::Case;
use rustc_ast::AttrId;
+use rustc_ast::CoroutineKind;
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, Const, DelimArgs, Extern};
-use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, Mutability, StrLit};
+use rustc_ast::{AttrArgs, AttrArgsEq, Expr, ExprKind, Mutability, StrLit};
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
@@ -93,7 +93,7 @@ pub enum TrailingToken {
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
- if let token::$constructor(x) = &**nt {
+ if let token::$constructor(x) = &nt.0 {
let $x = x.clone();
$p.bump();
return Ok($e);
@@ -107,15 +107,15 @@ macro_rules! maybe_whole {
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery
- && $self.may_recover()
- && $self.look_ahead(1, |t| t == &token::ModSep)
- && let token::Interpolated(nt) = &$self.token.kind
- && let token::NtTy(ty) = &**nt
- {
- let ty = ty.clone();
- $self.bump();
- return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
- }
+ && $self.may_recover()
+ && $self.look_ahead(1, |t| t == &token::ModSep)
+ && let token::Interpolated(nt) = &$self.token.kind
+ && let token::NtTy(ty) = &nt.0
+ {
+ let ty = ty.clone();
+ $self.bump();
+ return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
+ }
};
}
@@ -130,7 +130,7 @@ pub struct Parser<'a> {
pub sess: &'a ParseSess,
/// The current token.
pub token: Token,
- /// The spacing for the current token
+ /// The spacing for the current token.
pub token_spacing: Spacing,
/// The previous token.
pub prev_token: Token,
@@ -240,7 +240,7 @@ struct TokenCursor {
// Token streams surrounding the current one. The delimiters for stack[n]'s
// tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
// because it's the outermost token stream which never has delimiters.
- stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
+ stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
}
impl TokenCursor {
@@ -264,24 +264,31 @@ impl TokenCursor {
));
return (token.clone(), spacing);
}
- &TokenTree::Delimited(sp, delim, ref tts) => {
+ &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
let trees = tts.clone().into_trees();
- self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
+ self.stack.push((
+ mem::replace(&mut self.tree_cursor, trees),
+ sp,
+ spacing,
+ delim,
+ ));
if delim != Delimiter::Invisible {
- return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
+ return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
}
// No open delimiter to return; continue on to the next iteration.
}
};
- } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
+ } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
// We have exhausted this token stream. Move back to its parent token stream.
self.tree_cursor = tree_cursor;
if delim != Delimiter::Invisible {
- return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
+ return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
}
// No close delimiter to return; continue on to the next iteration.
} else {
- // We have exhausted the outermost token stream.
+ // We have exhausted the outermost token stream. The use of
+ // `Spacing::Alone` is arbitrary and immaterial, because the
+ // `Eof` token's spacing is never used.
return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
}
}
@@ -367,12 +374,14 @@ impl TokenDescription {
pub(super) fn token_descr(token: &Token) -> String {
let name = pprust::token_to_string(token).to_string();
- let kind = TokenDescription::from_token(token).map(|kind| match kind {
- TokenDescription::ReservedIdentifier => "reserved identifier",
- TokenDescription::Keyword => "keyword",
- TokenDescription::ReservedKeyword => "reserved keyword",
- TokenDescription::DocComment => "doc comment",
- });
+ let kind = match (TokenDescription::from_token(token), &token.kind) {
+ (Some(TokenDescription::ReservedIdentifier), _) => Some("reserved identifier"),
+ (Some(TokenDescription::Keyword), _) => Some("keyword"),
+ (Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
+ (Some(TokenDescription::DocComment), _) => Some("doc comment"),
+ (None, TokenKind::Interpolated(node)) => Some(node.0.descr()),
+ (None, _) => None,
+ };
if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
}
@@ -662,7 +671,7 @@ impl<'a> Parser<'a> {
fn check_inline_const(&self, dist: usize) -> bool {
self.is_keyword_ahead(dist, &[kw::Const])
&& self.look_ahead(dist + 1, |t| match &t.kind {
- token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
+ token::Interpolated(nt) => matches!(&nt.0, token::NtBlock(..)),
token::OpenDelim(Delimiter::Brace) => true,
_ => false,
})
@@ -697,8 +706,8 @@ impl<'a> Parser<'a> {
// is not needed (we'll capture the entire 'glued' token),
// and `bump` will set this field to `None`
self.break_last_token = true;
- // Use the spacing of the glued token as the spacing
- // of the unglued second token.
+ // Use the spacing of the glued token as the spacing of the
+ // unglued second token.
self.bump_with((Token::new(second, second_span), self.token_spacing));
true
}
@@ -830,8 +839,8 @@ impl<'a> Parser<'a> {
// https://github.com/rust-lang/rust/issues/72373
if self.prev_token.is_ident() && self.token.kind == token::DotDot {
let msg = format!(
- "if you meant to bind the contents of \
- the rest of the array pattern into `{}`, use `@`",
+ "if you meant to bind the contents of the rest of the array \
+ pattern into `{}`, use `@`",
pprust::token_to_string(&self.prev_token)
);
expect_err
@@ -873,6 +882,9 @@ impl<'a> Parser<'a> {
if self.token == token::Colon {
// we will try to recover in `maybe_recover_struct_lit_bad_delims`
return Err(expect_err);
+ } else if let [token::CloseDelim(Delimiter::Parenthesis)] = kets
+ {
+ return Err(expect_err);
} else {
expect_err.emit();
break;
@@ -1063,7 +1075,7 @@ impl<'a> Parser<'a> {
return looker(&self.token);
}
- if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
+ if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
&& delim != Delimiter::Invisible
{
// We are not in the outermost token stream, and the token stream
@@ -1072,7 +1084,7 @@ impl<'a> Parser<'a> {
let tree_cursor = &self.token_cursor.tree_cursor;
let all_normal = (0..dist).all(|i| {
let token = tree_cursor.look_ahead(i);
- !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
+ !matches!(token, Some(TokenTree::Delimited(.., Delimiter::Invisible, _)))
});
if all_normal {
// There were no skipped delimiters. Do lookahead by plain indexing.
@@ -1081,7 +1093,7 @@ impl<'a> Parser<'a> {
// Indexing stayed within the current token stream.
match tree {
TokenTree::Token(token, _) => looker(token),
- TokenTree::Delimited(dspan, delim, _) => {
+ TokenTree::Delimited(dspan, _, delim, _) => {
looker(&Token::new(token::OpenDelim(*delim), dspan.open))
}
}
@@ -1115,27 +1127,42 @@ impl<'a> Parser<'a> {
}
/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
- fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
+ pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
}
/// Parses asyncness: `async` or nothing.
- fn parse_asyncness(&mut self, case: Case) -> Async {
+ fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
+ let span = self.token.uninterpolated_span();
if self.eat_keyword_case(kw::Async, case) {
- let span = self.prev_token.uninterpolated_span();
- Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
- } else {
- Async::No
- }
- }
-
- /// Parses genness: `gen` or nothing.
- fn parse_genness(&mut self, case: Case) -> Gen {
- if self.token.span.at_least_rust_2024() && self.eat_keyword_case(kw::Gen, case) {
- let span = self.prev_token.uninterpolated_span();
- Gen::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
+ // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
+ // error if edition <= 2024, like we do with async and edition <= 2018?
+ if self.token.uninterpolated_span().at_least_rust_2024()
+ && self.eat_keyword_case(kw::Gen, case)
+ {
+ let gen_span = self.prev_token.uninterpolated_span();
+ Some(CoroutineKind::AsyncGen {
+ span: span.to(gen_span),
+ closure_id: DUMMY_NODE_ID,
+ return_impl_trait_id: DUMMY_NODE_ID,
+ })
+ } else {
+ Some(CoroutineKind::Async {
+ span,
+ closure_id: DUMMY_NODE_ID,
+ return_impl_trait_id: DUMMY_NODE_ID,
+ })
+ }
+ } else if self.token.uninterpolated_span().at_least_rust_2024()
+ && self.eat_keyword_case(kw::Gen, case)
+ {
+ Some(CoroutineKind::Gen {
+ span,
+ closure_id: DUMMY_NODE_ID,
+ return_impl_trait_id: DUMMY_NODE_ID,
+ })
} else {
- Gen::No
+ None
}
}
@@ -1244,7 +1271,7 @@ impl<'a> Parser<'a> {
|| self.check(&token::OpenDelim(Delimiter::Brace));
delimited.then(|| {
- let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else {
+ let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
unreachable!()
};
DelimArgs { dspan, delim, tokens }
@@ -1268,7 +1295,7 @@ impl<'a> Parser<'a> {
token::OpenDelim(..) => {
// Grab the tokens within the delimiters.
let stream = self.token_cursor.tree_cursor.stream.clone();
- let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
+ let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
// Advance the token cursor through the entire delimited
// sequence. After getting the `OpenDelim` we are *within* the
@@ -1288,12 +1315,13 @@ impl<'a> Parser<'a> {
// Consume close delimiter
self.bump();
- TokenTree::Delimited(span, delim, stream)
+ TokenTree::Delimited(span, spacing, delim, stream)
}
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
+ let prev_spacing = self.token_spacing;
self.bump();
- TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
+ TokenTree::Token(self.prev_token.clone(), prev_spacing)
}
}
}
@@ -1479,7 +1507,7 @@ pub(crate) fn make_unclosed_delims_error(
opening_candidate: unmatched.candidate_span,
unclosed: unmatched.unclosed_span,
}
- .into_diagnostic(&sess.span_diagnostic);
+ .into_diagnostic(&sess.dcx);
Some(err)
}
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 025b0615a..301a88cd0 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -50,12 +50,12 @@ impl<'a> Parser<'a> {
NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
NonterminalKind::Vis => match token.kind {
// The follow-set of :vis + "priv" keyword + interpolated
- token::Comma | token::Ident(..) | token::Interpolated(..) => true,
+ token::Comma | token::Ident(..) | token::Interpolated(_) => true,
_ => token.can_begin_type(),
},
NonterminalKind::Block => match &token.kind {
token::OpenDelim(Delimiter::Brace) => true,
- token::Interpolated(nt) => match **nt {
+ token::Interpolated(nt) => match &nt.0 {
NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_)
| NtVis(_) => false,
@@ -64,7 +64,7 @@ impl<'a> Parser<'a> {
},
NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
token::ModSep | token::Ident(..) => true,
- token::Interpolated(nt) => may_be_ident(nt),
+ token::Interpolated(nt) => may_be_ident(&nt.0),
_ => false,
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
@@ -75,7 +75,7 @@ impl<'a> Parser<'a> {
token::BinOp(token::And) | // reference
token::BinOp(token::Minus) | // negative literal
token::AndAnd | // double reference
- token::Literal(..) | // literal
+ token::Literal(_) | // literal
token::DotDot | // range pattern (future compat)
token::DotDotDot | // range pattern (future compat)
token::ModSep | // path
@@ -83,14 +83,14 @@ impl<'a> Parser<'a> {
token::BinOp(token::Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern
token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
- token::Interpolated(nt) => may_be_ident(nt),
+ token::Interpolated(nt) => may_be_ident(&nt.0),
_ => false,
}
}
NonterminalKind::Lifetime => match &token.kind {
token::Lifetime(_) => true,
token::Interpolated(nt) => {
- matches!(**nt, NtLifetime(_))
+ matches!(&nt.0, NtLifetime(_))
}
_ => false,
},
@@ -114,8 +114,9 @@ impl<'a> Parser<'a> {
NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
Some(item) => NtItem(item),
None => {
- return Err(UnexpectedNonterminal::Item(self.token.span)
- .into_diagnostic(&self.sess.span_diagnostic));
+ return Err(
+ UnexpectedNonterminal::Item(self.token.span).into_diagnostic(self.dcx())
+ );
}
},
NonterminalKind::Block => {
@@ -127,7 +128,7 @@ impl<'a> Parser<'a> {
Some(s) => NtStmt(P(s)),
None => {
return Err(UnexpectedNonterminal::Statement(self.token.span)
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(self.dcx()));
}
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
@@ -163,7 +164,7 @@ impl<'a> Parser<'a> {
span: self.token.span,
token: self.token.clone(),
}
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(self.dcx()));
}
NonterminalKind::Path => {
NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
@@ -181,7 +182,7 @@ impl<'a> Parser<'a> {
span: self.token.span,
token: self.token.clone(),
}
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(self.dcx()));
}
}
};
@@ -191,7 +192,7 @@ impl<'a> Parser<'a> {
panic!(
"Missing tokens for nt {:?} at {:?}: {:?}",
nt,
- nt.span(),
+ nt.use_span(),
pprust::nonterminal_to_string(&nt)
);
}
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 0a4c7c17d..80233eddb 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -5,8 +5,8 @@ use crate::errors::{
ExpectedCommaAfterPatternField, GenericArgsInPatRequireTurbofishSyntax,
InclusiveRangeExtraEquals, InclusiveRangeMatchArrow, InclusiveRangeNoEnd, InvalidMutInPattern,
PatternOnWrongSideOfAt, RefMutOrderIncorrect, RemoveLet, RepeatedMutInPattern,
- TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg, TrailingVertNotAllowed,
- UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam,
+ SwitchRefBoxOrder, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg,
+ TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam,
UnexpectedVertVertInPattern,
};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
@@ -141,13 +141,21 @@ impl<'a> Parser<'a> {
};
// Parse the first pattern (`p_0`).
- let mut first_pat = self.parse_pat_no_top_alt(expected, syntax_loc)?;
- if rc == RecoverComma::Yes {
- self.maybe_recover_unexpected_comma(
- first_pat.span,
- matches!(first_pat.kind, PatKind::MacCall(_)),
- rt,
- )?;
+ let mut first_pat = match self.parse_pat_no_top_alt(expected, syntax_loc) {
+ Ok(pat) => pat,
+ Err(mut err)
+ if self.token.is_reserved_ident()
+ && !self.token.is_keyword(kw::In)
+ && !self.token.is_keyword(kw::If) =>
+ {
+ err.emit();
+ self.bump();
+ self.mk_pat(self.token.span, PatKind::Wild)
+ }
+ Err(err) => return Err(err),
+ };
+ if rc == RecoverComma::Yes && !first_pat.could_be_never_pattern() {
+ self.maybe_recover_unexpected_comma(first_pat.span, rt)?;
}
// If the next token is not a `|`,
@@ -188,8 +196,8 @@ impl<'a> Parser<'a> {
err.span_label(lo, WHILE_PARSING_OR_MSG);
err
})?;
- if rc == RecoverComma::Yes {
- self.maybe_recover_unexpected_comma(pat.span, false, rt)?;
+ if rc == RecoverComma::Yes && !pat.could_be_never_pattern() {
+ self.maybe_recover_unexpected_comma(pat.span, rt)?;
}
pats.push(pat);
}
@@ -368,12 +376,22 @@ impl<'a> Parser<'a> {
self.recover_dotdotdot_rest_pat(lo)
} else if let Some(form) = self.parse_range_end() {
self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
+ } else if self.eat(&token::Not) {
+ // Parse `!`
+ self.sess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
+ PatKind::Never
} else if self.eat_keyword(kw::Underscore) {
- // Parse _
+ // Parse `_`
PatKind::Wild
} else if self.eat_keyword(kw::Mut) {
self.parse_pat_ident_mut(syntax_loc)?
} else if self.eat_keyword(kw::Ref) {
+ if self.check_keyword(kw::Box) {
+ // Suggest `box ref`.
+ let span = self.prev_token.span.to(self.token.span);
+ self.bump();
+ self.sess.emit_err(SwitchRefBoxOrder { span });
+ }
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
self.parse_pat_ident(BindingAnnotation(ByRef::Yes, mutbl), syntax_loc)?
@@ -541,7 +559,7 @@ impl<'a> Parser<'a> {
}
self.sess
- .emit_err(AmbiguousRangePattern { span: pat.span, pat: pprust::pat_to_string(&pat) });
+ .emit_err(AmbiguousRangePattern { span: pat.span, pat: pprust::pat_to_string(pat) });
}
/// Parse `&pat` / `&mut pat`.
@@ -592,7 +610,7 @@ impl<'a> Parser<'a> {
// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
if let token::Interpolated(nt) = &self.token.kind {
- if let token::NtPat(_) = **nt {
+ if let token::NtPat(..) = &nt.0 {
self.expected_ident_found_err().emit();
}
}
@@ -638,13 +656,13 @@ impl<'a> Parser<'a> {
/// Error on `mut $pat` where `$pat` is not an ident.
fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
- let span = lo.to(pat.span);
- let pat = pprust::pat_to_string(&pat);
-
self.sess.emit_err(if changed_any_binding {
- InvalidMutInPattern::NestedIdent { span, pat }
+ InvalidMutInPattern::NestedIdent {
+ span: lo.to(pat.span),
+ pat: pprust::pat_to_string(pat),
+ }
} else {
- InvalidMutInPattern::NonIdent { span, pat }
+ InvalidMutInPattern::NonIdent { span: lo.until(pat.span) }
});
}
@@ -829,7 +847,7 @@ impl<'a> Parser<'a> {
binding_annotation: BindingAnnotation,
syntax_loc: Option<PatternLocation>,
) -> PResult<'a, PatKind> {
- let ident = self.parse_ident()?;
+ let ident = self.parse_ident_common(false)?;
if self.may_recover()
&& !matches!(syntax_loc, Some(PatternLocation::FunctionParameter))
@@ -855,7 +873,7 @@ impl<'a> Parser<'a> {
// will direct us over to `parse_enum_variant()`.
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
return Err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }
- .into_diagnostic(&self.sess.span_diagnostic));
+ .into_diagnostic(self.dcx()));
}
Ok(PatKind::Ident(binding_annotation, ident, sub))
@@ -969,7 +987,7 @@ impl<'a> Parser<'a> {
// check that a comma comes after every field
if !ate_comma {
let mut err = ExpectedCommaAfterPatternField { span: self.token.span }
- .into_diagnostic(&self.sess.span_diagnostic);
+ .into_diagnostic(self.dcx());
if let Some(mut delayed) = delayed_err {
delayed.emit();
}
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 8626dbe40..3b92a9119 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -123,7 +123,7 @@ impl<'a> Parser<'a> {
self.bump(); // colon
- self.diagnostic()
+ self.dcx()
.struct_span_err(
self.prev_token.span,
"found single colon before projection in qualified path",
@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
});
if let token::Interpolated(nt) = &self.token.kind {
- if let token::NtTy(ty) = &**nt {
+ if let token::NtTy(ty) = &nt.0 {
if let ast::TyKind::Path(None, path) = &ty.kind {
let path = path.clone();
self.bump();
@@ -326,7 +326,7 @@ impl<'a> Parser<'a> {
.is_nightly_build()
.then_some(()),
}
- .into_diagnostic(self.diagnostic());
+ .into_diagnostic(self.dcx());
}
// Attempt to find places where a missing `>` might belong.
else if let Some(arg) = args
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index aa939a71d..1ee5a96d5 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
// Don't use `maybe_whole` so that we have precise control
// over when we bump the parser
if let token::Interpolated(nt) = &self.token.kind
- && let token::NtStmt(stmt) = &**nt
+ && let token::NtStmt(stmt) = &nt.0
{
let mut stmt = stmt.clone();
self.bump();
@@ -384,10 +384,10 @@ impl<'a> Parser<'a> {
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
- if op.node.lazy() {
+ if op.node.is_lazy() {
self.sess.emit_err(errors::InvalidExpressionInLetElse {
span: init.span,
- operator: op.node.to_string(),
+ operator: op.node.as_str(),
sugg: errors::WrapExpressionInParentheses {
left: init.span.shrink_to_lo(),
right: init.span.shrink_to_hi(),
@@ -567,20 +567,37 @@ impl<'a> Parser<'a> {
snapshot.recover_diff_marker();
}
if self.token == token::Colon {
- // if next token is following a colon, it's likely a path
- // and we can suggest a path separator
- self.bump();
- if self.token.span.lo() == self.prev_token.span.hi() {
+ // if a previous and next token of the current one is
+ // integer literal (e.g. `1:42`), it's likely a range
+ // expression for Pythonistas and we can suggest so.
+ if self.prev_token.is_integer_lit()
+ && self.may_recover()
+ && self.look_ahead(1, |token| token.is_integer_lit())
+ {
+ // FIXME(hkmatsumoto): Might be better to trigger
+ // this only when parsing an index expression.
err.span_suggestion_verbose(
- self.prev_token.span,
- "maybe write a path separator here",
- "::",
+ self.token.span,
+ "you might have meant a range expression",
+ "..",
Applicability::MaybeIncorrect,
);
- }
- if self.sess.unstable_features.is_nightly_build() {
- // FIXME(Nilstrieb): Remove this again after a few months.
- err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
+ } else {
+ // if next token is following a colon, it's likely a path
+ // and we can suggest a path separator
+ self.bump();
+ if self.token.span.lo() == self.prev_token.span.hi() {
+ err.span_suggestion_verbose(
+ self.prev_token.span,
+ "maybe write a path separator here",
+ "::",
+ Applicability::MaybeIncorrect,
+ );
+ }
+ if self.sess.unstable_features.is_nightly_build() {
+ // FIXME(Nilstrieb): Remove this again after a few months.
+ err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
+ }
}
}
@@ -619,6 +636,20 @@ impl<'a> Parser<'a> {
match &mut stmt.kind {
// Expression without semicolon.
StmtKind::Expr(expr)
+ if classify::expr_requires_semi_to_be_stmt(expr)
+ && !expr.attrs.is_empty()
+ && ![token::Eof, token::Semi, token::CloseDelim(Delimiter::Brace)]
+ .contains(&self.token.kind) =>
+ {
+ // The user has written `#[attr] expr` which is unsupported. (#106020)
+ self.attr_on_non_tail_expr(&expr);
+ // We already emitted an error, so don't emit another type error
+ let sp = expr.span.to(self.prev_token.span);
+ *expr = self.mk_expr_err(sp);
+ }
+
+ // Expression without semicolon.
+ StmtKind::Expr(expr)
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
{
// Just check for errors and recover; do not eat semicolon yet.
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index be2cbaf30..da8cc05ff 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -37,7 +37,7 @@ impl BoundModifiers {
(BoundPolarity::Positive, None) => TraitBoundModifier::None,
(BoundPolarity::Negative(_), None) => TraitBoundModifier::Negative,
(BoundPolarity::Maybe(_), None) => TraitBoundModifier::Maybe,
- (BoundPolarity::Positive, Some(_)) => TraitBoundModifier::MaybeConst,
+ (BoundPolarity::Positive, Some(sp)) => TraitBoundModifier::MaybeConst(sp),
(BoundPolarity::Negative(_), Some(_)) => TraitBoundModifier::MaybeConstNegative,
(BoundPolarity::Maybe(_), Some(_)) => TraitBoundModifier::MaybeConstMaybe,
}
@@ -135,7 +135,7 @@ impl<'a> Parser<'a> {
)
}
- /// Parse a type suitable for a field defintion.
+ /// Parse a type suitable for a field definition.
/// The difference from `parse_ty` is that this version
/// allows anonymous structs and unions.
pub fn parse_ty_for_field_def(&mut self) -> PResult<'a, P<Ty>> {
@@ -287,6 +287,7 @@ impl<'a> Parser<'a> {
// Function pointer type
self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
} else if self.check_keyword(kw::For) {
+ let for_span = self.token.span;
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
@@ -299,9 +300,44 @@ impl<'a> Parser<'a> {
recover_return_sign,
)?
} else {
- let path = self.parse_path(PathStyle::Type)?;
- let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
- self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
+ // Try to recover `for<'a> dyn Trait` or `for<'a> impl Trait`.
+ if self.may_recover()
+ && (self.eat_keyword_noexpect(kw::Impl) || self.eat_keyword_noexpect(kw::Dyn))
+ {
+ let kw = self.prev_token.ident().unwrap().0;
+ let removal_span = kw.span.with_hi(self.token.span.lo());
+ let path = self.parse_path(PathStyle::Type)?;
+ let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
+ let kind =
+ self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?;
+ let mut err = self.sess.create_err(errors::TransposeDynOrImpl {
+ span: kw.span,
+ kw: kw.name.as_str(),
+ sugg: errors::TransposeDynOrImplSugg {
+ removal_span,
+ insertion_span: for_span.shrink_to_lo(),
+ kw: kw.name.as_str(),
+ },
+ });
+
+ // Take the parsed bare trait object and turn it either
+ // into a `dyn` object or an `impl Trait`.
+ let kind = match (kind, kw.name) {
+ (TyKind::TraitObject(bounds, _), kw::Dyn) => {
+ TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
+ }
+ (TyKind::TraitObject(bounds, _), kw::Impl) => {
+ TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
+ }
+ _ => return Err(err),
+ };
+ err.emit();
+ kind
+ } else {
+ let path = self.parse_path(PathStyle::Type)?;
+ let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
+ self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
+ }
}
} else if self.eat_keyword(kw::Impl) {
self.parse_impl_ty(&mut impl_dyn_multi)?
@@ -562,7 +598,7 @@ impl<'a> Parser<'a> {
tokens: None,
};
let span_start = self.token.span;
- let ast::FnHeader { ext, unsafety, constness, asyncness } =
+ let ast::FnHeader { ext, unsafety, constness, coroutine_kind } =
self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
if self.may_recover() && self.token.kind == TokenKind::Lt {
self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
@@ -575,9 +611,10 @@ impl<'a> Parser<'a> {
// cover it.
self.sess.emit_err(FnPointerCannotBeConst { span: whole_span, qualifier: span });
}
- if let ast::Async::Yes { span, .. } = asyncness {
+ if let Some(ast::CoroutineKind::Async { span, .. }) = coroutine_kind {
self.sess.emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
}
+ // FIXME(gen_blocks): emit a similar error for `gen fn()`
let decl_span = span_start.to(self.token.span);
Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl, decl_span })))
}
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index f73965982..9fea38266 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -6,9 +6,9 @@ use rustc_ast::token::Delimiter;
use rustc_ast::tokenstream::DelimSpan;
use rustc_ast::MetaItemKind;
use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MetaItem};
-use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, FatalError, PResult};
use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
+use rustc_session::errors::report_lit_error;
use rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT;
use rustc_session::parse::ParseSess;
use rustc_span::{sym, Span, Symbol};
@@ -51,29 +51,45 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
MetaItemKind::List(nmis)
}
AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
- if let ast::ExprKind::Lit(token_lit) = expr.kind
- && let Ok(lit) = ast::MetaItemLit::from_token_lit(token_lit, expr.span)
- {
- if token_lit.suffix.is_some() {
- let mut err = sess.span_diagnostic.struct_span_err(
- expr.span,
- "suffixed literals are not allowed in attributes",
- );
- err.help(
- "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
- use an unsuffixed version (`1`, `1.0`, etc.)",
- );
- return Err(err);
- } else {
- MetaItemKind::NameValue(lit)
- }
+ if let ast::ExprKind::Lit(token_lit) = expr.kind {
+ let res = ast::MetaItemLit::from_token_lit(token_lit, expr.span);
+ let res = match res {
+ Ok(lit) => {
+ if token_lit.suffix.is_some() {
+ let mut err = sess.dcx.struct_span_err(
+ expr.span,
+ "suffixed literals are not allowed in attributes",
+ );
+ err.help(
+ "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
+ use an unsuffixed version (`1`, `1.0`, etc.)",
+ );
+ return Err(err);
+ } else {
+ MetaItemKind::NameValue(lit)
+ }
+ }
+ Err(err) => {
+ report_lit_error(sess, err, token_lit, expr.span);
+ let lit = ast::MetaItemLit {
+ symbol: token_lit.symbol,
+ suffix: token_lit.suffix,
+ kind: ast::LitKind::Err,
+ span: expr.span,
+ };
+ MetaItemKind::NameValue(lit)
+ }
+ };
+ res
} else {
- // The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
- // happen with e.g. `#[foo = include_str!("nonexistent-file.rs")]`; in that
- // case we delay the error because an earlier error will have already been
- // reported.
- let msg = format!("unexpected expression: `{}`", pprust::expr_to_string(expr));
- let mut err = sess.span_diagnostic.struct_span_err(expr.span, msg);
+ // Example cases:
+ // - `#[foo = 1+1]`: results in `ast::ExprKind::BinOp`.
+ // - `#[foo = include_str!("nonexistent-file.rs")]`:
+ // results in `ast::ExprKind::Err`. In that case we delay
+ // the error because an earlier error will have already
+ // been reported.
+ let msg = format!("attribute value must be a literal");
+ let mut err = sess.dcx.struct_span_err(expr.span, msg);
if let ast::ExprKind::Err = expr.kind {
err.downgrade_to_delayed_bug();
}
@@ -186,10 +202,11 @@ fn emit_malformed_attribute(
msg.push_str(&format!("`{code}`"));
suggestions.push(code);
}
+ suggestions.sort();
if should_warn(name) {
- sess.buffer_lint(&ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, msg);
+ sess.buffer_lint(ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, msg);
} else {
- sess.span_diagnostic
+ sess.dcx
.struct_span_err(span, error_msg)
.span_suggestions(
span,
@@ -198,7 +215,7 @@ fn emit_malformed_attribute(
} else {
"the following are the possible correct uses"
},
- suggestions.into_iter(),
+ suggestions,
Applicability::HasPlaceholders,
)
.emit();