Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs              |  55
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs   |   8
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs           |   6
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs    |   4
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs |   4
-rw-r--r--  compiler/rustc_parse/src/lib.rs                 |   2
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs  | 247
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs         |  65
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs     |   2
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs         |  51
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs          |  68
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs          |  10
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs         |  40
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs         |  12
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs           |   9
15 files changed, 298 insertions(+), 285 deletions(-)
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 1662db36d..069217165 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -888,12 +888,12 @@ pub(crate) struct InvalidMetaItem {
#[derive(Subdiagnostic)]
#[suggestion(
- parse_sugg_escape_to_use_as_identifier,
+ parse_sugg_escape_identifier,
style = "verbose",
applicability = "maybe-incorrect",
code = "r#"
)]
-pub(crate) struct SuggEscapeToUseAsIdentifier {
+pub(crate) struct SuggEscapeIdentifier {
#[primary_span]
pub span: Span,
pub ident_name: String,
@@ -937,8 +937,9 @@ impl ExpectedIdentifierFound {
pub(crate) struct ExpectedIdentifier {
pub span: Span,
pub token: Token,
- pub suggest_raw: Option<SuggEscapeToUseAsIdentifier>,
+ pub suggest_raw: Option<SuggEscapeIdentifier>,
pub suggest_remove_comma: Option<SuggRemoveComma>,
+ pub help_cannot_start_number: Option<HelpIdentifierStartsWithNumber>,
}
impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
@@ -975,10 +976,21 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
sugg.add_to_diagnostic(&mut diag);
}
+ if let Some(help) = self.help_cannot_start_number {
+ help.add_to_diagnostic(&mut diag);
+ }
+
diag
}
}
+#[derive(Subdiagnostic)]
+#[help(parse_invalid_identifier_with_leading_number)]
+pub(crate) struct HelpIdentifierStartsWithNumber {
+ #[primary_span]
+ pub num_span: Span,
+}
+
pub(crate) struct ExpectedSemi {
pub span: Span,
pub token: Token,
@@ -1208,14 +1220,6 @@ pub(crate) struct SelfParamNotFirst {
}
#[derive(Diagnostic)]
-#[diag(parse_invalid_identifier_with_leading_number)]
-pub(crate) struct InvalidIdentiferStartsWithNumber {
- #[primary_span]
- #[label]
- pub span: Span,
-}
-
-#[derive(Diagnostic)]
#[diag(parse_const_generic_without_braces)]
pub(crate) struct ConstGenericWithoutBraces {
#[primary_span]
@@ -2299,3 +2303,32 @@ impl HelpUseLatestEdition {
}
}
}
+
+#[derive(Diagnostic)]
+#[diag(parse_box_syntax_removed)]
+pub struct BoxSyntaxRemoved<'a> {
+ #[primary_span]
+ #[suggestion(
+ code = "Box::new({code})",
+ applicability = "machine-applicable",
+ style = "verbose"
+ )]
+ pub span: Span,
+ pub code: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_return_type_notation_output)]
+pub(crate) struct BadReturnTypeNotationOutput {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "maybe-incorrect")]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_return_type_notation_dotdot)]
+pub(crate) struct BadReturnTypeNotationDotDot {
+ #[primary_span]
+ #[suggestion(code = "", applicability = "maybe-incorrect")]
+ pub span: Span,
+}
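
A minimal illustration of the renamed escape suggestion (hypothetical input; the exact wording lives in messages.ftl):

    struct S {
        type: u32,
    //  ^^^^ expected identifier, found keyword `type`
    //  help: escape `type` to use it as an identifier: `r#type`
    }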
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
index 27f4428d3..9e6d27bf0 100644
--- a/compiler/rustc_parse/src/lexer/diagnostics.rs
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -21,7 +21,7 @@ pub struct TokenTreeDiagInfo {
pub matching_block_spans: Vec<(Span, Span)>,
}
-pub fn same_identation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+pub fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
(Some(open_padding), Some(close_padding)) => open_padding == close_padding,
_ => false,
@@ -67,13 +67,13 @@ pub fn report_suspicious_mismatch_block(
let mut matched_spans: Vec<(Span, bool)> = diag_info
.matching_block_spans
.iter()
- .map(|&(open, close)| (open.with_hi(close.lo()), same_identation_level(sm, open, close)))
+ .map(|&(open, close)| (open.with_hi(close.lo()), same_indentation_level(sm, open, close)))
.collect();
// sort by `lo`, so the large block spans in the front
- matched_spans.sort_by(|a, b| a.0.lo().cmp(&b.0.lo()));
+ matched_spans.sort_by_key(|(span, _)| span.lo());
- // We use larger block whose identation is well to cover those inner mismatched blocks
+ // We use the larger, well-indented block to cover those inner mismatched blocks
// O(N^2) here, but we are on error reporting path, so it is fine
for i in 0..matched_spans.len() {
let (block_span, same_ident) = matched_spans[i];
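
A minimal sketch (not from the patch) of the source shape this heuristic targets: two delimiters are treated as a likely pair when the lines they start on share the same indentation.

    fn outer() {
        if cond {
            body();
        // the `}` below sits at the same indentation level as the line that
        // opened `outer`, so the heuristic pairs those two and can point at the
        // `if`'s `{` as the delimiter that was probably left unclosed
    }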
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 59958a309..9e856c9f2 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -175,7 +175,7 @@ impl<'a> StringReader<'a> {
if !sym.can_be_raw() {
self.sess.emit_err(errors::CannotBeRawIdent { span, ident: sym });
}
- self.sess.raw_identifier_spans.borrow_mut().push(span);
+ self.sess.raw_identifier_spans.push(span);
token::Ident(sym, true)
}
rustc_lexer::TokenKind::UnknownPrefix => {
@@ -553,8 +553,8 @@ impl<'a> StringReader<'a> {
}
if let Some(possible_offset) = possible_offset {
- let lo = start + BytePos(possible_offset as u32);
- let hi = lo + BytePos(found_terminators as u32);
+ let lo = start + BytePos(possible_offset);
+ let hi = lo + BytePos(found_terminators);
let span = self.mk_sp(lo, hi);
err.span_suggestion(
span,
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index 36fd1e37d..7c2c08951 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,5 +1,5 @@
use super::diagnostics::report_suspicious_mismatch_block;
-use super::diagnostics::same_identation_level;
+use super::diagnostics::same_indentation_level;
use super::diagnostics::TokenTreeDiagInfo;
use super::{StringReader, UnmatchedDelim};
use rustc_ast::token::{self, Delimiter, Token};
@@ -153,7 +153,7 @@ impl<'a> TokenTreesReader<'a> {
unclosed_delimiter = Some(sp);
};
for (brace, brace_span) in &self.diag_info.open_braces {
- if same_identation_level(&sm, self.token.span, *brace_span)
+ if same_indentation_level(&sm, self.token.span, *brace_span)
&& brace == &close_delim
{
// high likelihood of these two corresponding
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index d4f971d5b..1f027c08f 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -336,8 +336,8 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
("\"", "Quotation Mark", None),
];
-pub(super) fn check_for_substitution<'a>(
- reader: &StringReader<'a>,
+pub(super) fn check_for_substitution(
+ reader: &StringReader<'_>,
pos: BytePos,
ch: char,
count: usize,
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index d1c3fd0cd..17466cd0e 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -36,7 +36,7 @@ pub mod validate_attr;
mod errors;
-fluent_messages! { "../locales/en-US.ftl" }
+fluent_messages! { "../messages.ftl" }
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index a051dbe9f..e03ce5d71 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -6,20 +6,19 @@ use super::{
use crate::errors::{
AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub,
ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
- ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentOnParamType,
- DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
- GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg, InInTypo,
- IncorrectAwait, IncorrectSemicolon, IncorrectUseOfAwait, ParenthesesInForHead,
- ParenthesesInForHeadSugg, PatternMethodParamWithoutBody, QuestionMarkInType,
- QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
- StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens, StructLiteralNeedingParensSugg,
- SuggEscapeToUseAsIdentifier, SuggRemoveComma, UnexpectedConstInGenericParam,
- UnexpectedConstParamDeclaration, UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets,
- UseEqInstead,
+ ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything,
+ DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
+ GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
+ HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon,
+ IncorrectUseOfAwait, ParenthesesInForHead, ParenthesesInForHeadSugg,
+ PatternMethodParamWithoutBody, QuestionMarkInType, QuestionMarkInTypeSugg, SelfParamNotFirst,
+ StructLiteralBodyWithoutPath, StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens,
+ StructLiteralNeedingParensSugg, SuggEscapeIdentifier, SuggRemoveComma,
+ UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
+ UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead,
};
use crate::fluent_generated as fluent;
-use crate::lexer::UnmatchedDelim;
use crate::parser;
use rustc_ast as ast;
use rustc_ast::ptr::P;
@@ -39,7 +38,7 @@ use rustc_errors::{
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident};
-use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
+use rustc_span::{Span, SpanSnippetError, Symbol, DUMMY_SP};
use std::mem::take;
use std::ops::{Deref, DerefMut};
use thin_vec::{thin_vec, ThinVec};
@@ -220,7 +219,6 @@ impl MultiSugg {
/// is dropped.
pub struct SnapshotParser<'a> {
parser: Parser<'a>,
- unclosed_delims: Vec<UnmatchedDelim>,
}
impl<'a> Deref for SnapshotParser<'a> {
@@ -255,34 +253,36 @@ impl<'a> Parser<'a> {
&self.sess.span_diagnostic
}
- /// Replace `self` with `snapshot.parser` and extend `unclosed_delims` with `snapshot.unclosed_delims`.
- /// This is to avoid losing unclosed delims errors `create_snapshot_for_diagnostic` clears.
+ /// Replace `self` with `snapshot.parser`.
pub(super) fn restore_snapshot(&mut self, snapshot: SnapshotParser<'a>) {
*self = snapshot.parser;
- self.unclosed_delims.extend(snapshot.unclosed_delims);
- }
-
- pub fn unclosed_delims(&self) -> &[UnmatchedDelim] {
- &self.unclosed_delims
}
/// Create a snapshot of the `Parser`.
pub fn create_snapshot_for_diagnostic(&self) -> SnapshotParser<'a> {
- let mut snapshot = self.clone();
- let unclosed_delims = self.unclosed_delims.clone();
- // Clear `unclosed_delims` in snapshot to avoid
- // duplicate errors being emitted when the `Parser`
- // is dropped (which may or may not happen, depending
- // if the parsing the snapshot is created for is successful)
- snapshot.unclosed_delims.clear();
- SnapshotParser { parser: snapshot, unclosed_delims }
+ let snapshot = self.clone();
+ SnapshotParser { parser: snapshot }
}
pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
self.sess.source_map().span_to_snippet(span)
}
- pub(super) fn expected_ident_found(&mut self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+ /// Emits an error with suggestions if an identifier was expected but not found.
+ ///
+ /// Returns a possibly recovered identifier.
+ pub(super) fn expected_ident_found(
+ &mut self,
+ recover: bool,
+ ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+ if let TokenKind::DocComment(..) = self.prev_token.kind {
+ return Err(DocCommentDoesNotDocumentAnything {
+ span: self.prev_token.span,
+ missing_comma: None,
+ }
+ .into_diagnostic(&self.sess.span_diagnostic));
+ }
+
let valid_follow = &[
TokenKind::Eq,
TokenKind::Colon,
@@ -294,38 +294,61 @@ impl<'a> Parser<'a> {
TokenKind::CloseDelim(Delimiter::Brace),
TokenKind::CloseDelim(Delimiter::Parenthesis),
];
- let suggest_raw = match self.token.ident() {
- Some((ident, false))
- if ident.is_raw_guess()
- && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
- {
- Some(SuggEscapeToUseAsIdentifier {
- span: ident.span.shrink_to_lo(),
- // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
- // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
- ident_name: ident.name.to_string(),
- })
- }
- _ => None,
- };
+
+ let mut recovered_ident = None;
+ // we take this here so that the correct original token is retained in
+ // the diagnostic, regardless of eager recovery.
+ let bad_token = self.token.clone();
+
+ // suggest prepending a keyword in identifier position with `r#`
+ let suggest_raw = if let Some((ident, false)) = self.token.ident()
+ && ident.is_raw_guess()
+ && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
+ {
+ recovered_ident = Some((ident, true));
+
+ // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
+ // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
+ let ident_name = ident.name.to_string();
+
+ Some(SuggEscapeIdentifier {
+ span: ident.span.shrink_to_lo(),
+ ident_name
+ })
+ } else { None };
let suggest_remove_comma =
if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
- Some(SuggRemoveComma { span: self.token.span })
+ if recover {
+ self.bump();
+ recovered_ident = self.ident_or_err(false).ok();
+ };
+
+ Some(SuggRemoveComma { span: bad_token.span })
} else {
None
};
+ let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
+ let (invalid, valid) = self.token.span.split_at(len as u32);
+
+ recovered_ident = Some((Ident::new(valid_portion, valid), false));
+
+ HelpIdentifierStartsWithNumber { num_span: invalid }
+ });
+
let err = ExpectedIdentifier {
- span: self.token.span,
- token: self.token.clone(),
+ span: bad_token.span,
+ token: bad_token,
suggest_raw,
suggest_remove_comma,
+ help_cannot_start_number,
};
let mut err = err.into_diagnostic(&self.sess.span_diagnostic);
// if the token we have is a `<`
// it *might* be a misplaced generic
+ // FIXME: could we recover with this?
if self.token == token::Lt {
// all keywords that could have generic applied
let valid_prev_keywords =
@@ -376,7 +399,38 @@ impl<'a> Parser<'a> {
}
}
- err
+ if let Some(recovered_ident) = recovered_ident && recover {
+ err.emit();
+ Ok(recovered_ident)
+ } else {
+ Err(err)
+ }
+ }
+
+ pub(super) fn expected_ident_found_err(&mut self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+ self.expected_ident_found(false).unwrap_err()
+ }
+
+ /// Checks if the current token is an integer or float literal and looks like
+ /// it could be an invalid identifier with digits at the start.
+ ///
+ /// Returns the number of characters (bytes) composing the invalid portion
+ /// of the identifier and the valid portion of the identifier.
+ pub(super) fn is_lit_bad_ident(&mut self) -> Option<(usize, Symbol)> {
+ // ensure that the integer literal is followed by an *invalid*
+ // suffix: this is how we know that it is an identifier with an
+ // invalid beginning.
+ if let token::Literal(Lit {
+ kind: token::LitKind::Integer | token::LitKind::Float,
+ symbol,
+ suffix: Some(suffix), // no suffix makes it a valid literal
+ }) = self.token.kind
+ && rustc_ast::MetaItemLit::from_token(&self.token).is_none()
+ {
+ Some((symbol.as_str().len(), suffix))
+ } else {
+ None
+ }
}
pub(super) fn expected_one_of_not_found(
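
As an illustration of the new recovery (hypothetical input; messages approximated): `1x` lexes as a single numeric literal token whose suffix is not a valid literal suffix, which is exactly what `is_lit_bad_ident` detects.

    // `1x` becomes token::Literal { kind: Integer, symbol: "1", suffix: Some("x") },
    // which `MetaItemLit::from_token` rejects, so `is_lit_bad_ident` returns (1, "x").
    let 1x = 123;
    //  ^ the split span over `1` carries the "identifiers cannot start with a
    //    number" help; parsing resumes with the recovered identifier `x`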
@@ -579,21 +633,6 @@ impl<'a> Parser<'a> {
} else {
label_sp
};
- match self.recover_closing_delimiter(
- &expected
- .iter()
- .filter_map(|tt| match tt {
- TokenType::Token(t) => Some(t.clone()),
- _ => None,
- })
- .collect::<Vec<_>>(),
- err,
- ) {
- Err(e) => err = e,
- Ok(recovered) => {
- return Ok(recovered);
- }
- }
if self.check_too_many_raw_str_terminators(&mut err) {
if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
@@ -950,8 +989,7 @@ impl<'a> Parser<'a> {
}
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
// Recover from bad turbofish: `foo.collect::Vec<_>()`.
- let args = AngleBracketedArgs { args, span }.into();
- segment.args = args;
+ segment.args = Some(AngleBracketedArgs { args, span }.into());
self.sess.emit_err(GenericParamsWithoutAngleBrackets {
span,
@@ -1573,12 +1611,6 @@ impl<'a> Parser<'a> {
);
let mut err = self.struct_span_err(sp, &msg);
let label_exp = format!("expected `{token_str}`");
- match self.recover_closing_delimiter(&[t.clone()], err) {
- Err(e) => err = e,
- Ok(recovered) => {
- return Ok(recovered);
- }
- }
let sm = self.sess.source_map();
if !sm.is_multiline(prev_sp.until(sp)) {
// When the spans are in the same line, it means that the only content
@@ -1795,81 +1827,6 @@ impl<'a> Parser<'a> {
}
}
- pub(super) fn recover_closing_delimiter(
- &mut self,
- tokens: &[TokenKind],
- mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
- ) -> PResult<'a, bool> {
- let mut pos = None;
- // We want to use the last closing delim that would apply.
- for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
- if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
- && Some(self.token.span) > unmatched.unclosed_span
- {
- pos = Some(i);
- }
- }
- match pos {
- Some(pos) => {
- // Recover and assume that the detected unclosed delimiter was meant for
- // this location. Emit the diagnostic and act as if the delimiter was
- // present for the parser's sake.
-
- // Don't attempt to recover from this unclosed delimiter more than once.
- let unmatched = self.unclosed_delims.remove(pos);
- let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
- if unmatched.found_delim.is_none() {
- // We encountered `Eof`, set this fact here to avoid complaining about missing
- // `fn main()` when we found place to suggest the closing brace.
- *self.sess.reached_eof.borrow_mut() = true;
- }
-
- // We want to suggest the inclusion of the closing delimiter where it makes
- // the most sense, which is immediately after the last token:
- //
- // {foo(bar {}}
- // ^ ^
- // | |
- // | help: `)` may belong here
- // |
- // unclosed delimiter
- if let Some(sp) = unmatched.unclosed_span {
- let mut primary_span: Vec<Span> =
- err.span.primary_spans().iter().cloned().collect();
- primary_span.push(sp);
- let mut primary_span: MultiSpan = primary_span.into();
- for span_label in err.span.span_labels() {
- if let Some(label) = span_label.label {
- primary_span.push_span_label(span_label.span, label);
- }
- }
- err.set_span(primary_span);
- err.span_label(sp, "unclosed delimiter");
- }
- // Backticks should be removed to apply suggestions.
- let mut delim = delim.to_string();
- delim.retain(|c| c != '`');
- err.span_suggestion_short(
- self.prev_token.span.shrink_to_hi(),
- &format!("`{delim}` may belong here"),
- delim,
- Applicability::MaybeIncorrect,
- );
- if unmatched.found_delim.is_none() {
- // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
- // errors which would be emitted elsewhere in the parser and let other error
- // recovery consume the rest of the file.
- Err(err)
- } else {
- err.emit();
- self.expected_tokens.clear(); // Reduce the number of errors.
- Ok(true)
- }
- }
- _ => Err(err),
- }
- }
-
/// Eats tokens until we can be relatively sure we reached the end of the
/// statement. This is something of a best-effort heuristic.
///
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 95a7ca80d..03c82fbd3 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -8,6 +8,7 @@ use super::{
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
+use ast::{Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
@@ -29,6 +30,7 @@ use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::source_map::{self, Span, Spanned};
+use rustc_span::symbol::kw::PathRoot;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Pos};
use thin_vec::{thin_vec, ThinVec};
@@ -636,11 +638,27 @@ impl<'a> Parser<'a> {
self.parse_expr_unary(lo, UnOp::Not)
}
- /// Parse `box expr`.
+ /// Parse `box expr` - this syntax has been removed, but we still parse this
+ /// for now to provide an automated way to fix usages of it
fn parse_expr_box(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
let (span, expr) = self.parse_expr_prefix_common(lo)?;
- self.sess.gated_spans.gate(sym::box_syntax, span);
- Ok((span, ExprKind::Box(expr)))
+ let code = self.sess.source_map().span_to_snippet(span.with_lo(lo.hi())).unwrap();
+ self.sess.emit_err(errors::BoxSyntaxRemoved { span, code: code.trim() });
+ // So typechecking works, parse `box <expr>` as `::std::boxed::Box::new(expr)`
+ let path = Path {
+ span,
+ segments: [
+ PathSegment::from_ident(Ident::with_dummy_span(PathRoot)),
+ PathSegment::from_ident(Ident::with_dummy_span(sym::std)),
+ PathSegment::from_ident(Ident::from_str("boxed")),
+ PathSegment::from_ident(Ident::from_str("Box")),
+ PathSegment::from_ident(Ident::with_dummy_span(sym::new)),
+ ]
+ .into(),
+ tokens: None,
+ };
+ let path = self.mk_expr(span, ExprKind::Path(None, path));
+ Ok((span, self.mk_call(path, ThinVec::from([expr]))))
}
fn is_mistaken_not_ident_negation(&self) -> bool {
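
A hedged sketch of the resulting behaviour (example input, error wording approximated): removed `box` syntax still parses, but only to emit the error with a machine-applicable fix and to desugar into a plain call so later stages see a well-typed expression.

    let b = box 42;
    // error: `box_syntax` has been removed
    // suggestion (machine-applicable): `let b = Box::new(42);`
    // internally the expression is rebuilt as `::std::boxed::Box::new(42)`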
@@ -1394,19 +1412,6 @@ impl<'a> Parser<'a> {
self.parse_expr_let()
} else if self.eat_keyword(kw::Underscore) {
Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore))
- } else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
- // Don't complain about bare semicolons after unclosed braces
- // recovery in order to keep the error count down. Fixing the
- // delimiters will possibly also fix the bare semicolon found in
- // expression context. For example, silence the following error:
- //
- // error: expected expression, found `;`
- // --> file.rs:2:13
- // |
- // 2 | foo(bar(;
- // | ^ expected expression
- self.bump();
- Ok(self.mk_expr_err(self.token.span))
} else if self.token.uninterpolated_span().rust_2018() {
// `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
if self.check_keyword(kw::Async) {
@@ -1838,20 +1843,14 @@ impl<'a> Parser<'a> {
&mut self,
mk_lit_char: impl FnOnce(Symbol, Span) -> L,
) -> PResult<'a, L> {
- if let token::Interpolated(inner) = &self.token.kind {
- let expr = match inner.as_ref() {
- token::NtExpr(expr) => Some(expr),
- token::NtLiteral(expr) => Some(expr),
- _ => None,
- };
- if let Some(expr) = expr {
- if matches!(expr.kind, ExprKind::Err) {
- let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
- .into_diagnostic(&self.sess.span_diagnostic);
- err.downgrade_to_delayed_bug();
- return Err(err);
- }
- }
+ if let token::Interpolated(nt) = &self.token.kind
+ && let token::NtExpr(e) | token::NtLiteral(e) = &**nt
+ && matches!(e.kind, ExprKind::Err)
+ {
+ let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
+ .into_diagnostic(&self.sess.span_diagnostic);
+ err.downgrade_to_delayed_bug();
+ return Err(err);
}
let token = self.token.clone();
let err = |self_: &Self| {
@@ -2118,7 +2117,7 @@ impl<'a> Parser<'a> {
ClosureBinder::NotPresent
};
- let constness = self.parse_closure_constness(Case::Sensitive);
+ let constness = self.parse_closure_constness();
let movability =
if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
@@ -2768,7 +2767,7 @@ impl<'a> Parser<'a> {
(token::DotDotEq, token::Gt)
) {
// `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
- // so we supress the error here
+ // so we suppress the error here
err.delay_as_bug();
this.bump();
} else {
@@ -2912,7 +2911,7 @@ impl<'a> Parser<'a> {
self.expect_keyword(kw::Async)?;
let capture_clause = self.parse_capture_clause()?;
let (attrs, body) = self.parse_inner_attrs_and_block()?;
- let kind = ExprKind::Async(capture_clause, DUMMY_NODE_ID, body);
+ let kind = ExprKind::Async(capture_clause, body);
Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
}
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 8d0f168e0..f8ef1307c 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
let snapshot = self.create_snapshot_for_diagnostic();
match self.parse_ty() {
Ok(p) => {
- if let TyKind::ImplTrait(_, bounds) = &(*p).kind {
+ if let TyKind::ImplTrait(_, bounds) = &p.kind {
let span = impl_span.to(self.token.span.shrink_to_lo());
let mut err = self.struct_span_err(
span,
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 9d9ae154a..6422b8ac1 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -3,6 +3,7 @@ use crate::errors;
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
+use ast::StaticItem;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
@@ -125,16 +126,13 @@ impl<'a> Parser<'a> {
return Ok(Some(item.into_inner()));
};
- let mut unclosed_delims = vec![];
let item =
self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
let item =
this.parse_item_common_(attrs, mac_allowed, attrs_allowed, fn_parse_mode);
- unclosed_delims.append(&mut this.unclosed_delims);
Ok((item?, TrailingToken::None))
})?;
- self.unclosed_delims.append(&mut unclosed_delims);
Ok(item)
}
@@ -230,7 +228,7 @@ impl<'a> Parser<'a> {
self.bump(); // `static`
let m = self.parse_mutability();
let (ident, ty, expr) = self.parse_item_global(Some(m))?;
- (ident, ItemKind::Static(ty, m, expr))
+ (ident, ItemKind::Static(Box::new(StaticItem { ty, mutability: m, expr })))
} else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
// CONST ITEM
if self.token.is_keyword(kw::Impl) {
@@ -239,7 +237,7 @@ impl<'a> Parser<'a> {
} else {
self.recover_const_mut(const_span);
let (ident, ty, expr) = self.parse_item_global(None)?;
- (ident, ItemKind::Const(def_(), ty, expr))
+ (ident, ItemKind::Const(Box::new(ConstItem { defaultness: def_(), ty, expr })))
}
} else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
// TRAIT ITEM
@@ -865,9 +863,13 @@ impl<'a> Parser<'a> {
let kind = match AssocItemKind::try_from(kind) {
Ok(kind) => kind,
Err(kind) => match kind {
- ItemKind::Static(a, _, b) => {
+ ItemKind::Static(box StaticItem { ty, mutability: _, expr }) => {
self.sess.emit_err(errors::AssociatedStaticItemNotAllowed { span });
- AssocItemKind::Const(Defaultness::Final, a, b)
+ AssocItemKind::Const(Box::new(ConstItem {
+ defaultness: Defaultness::Final,
+ ty,
+ expr,
+ }))
}
_ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
},
@@ -1117,12 +1119,12 @@ impl<'a> Parser<'a> {
let kind = match ForeignItemKind::try_from(kind) {
Ok(kind) => kind,
Err(kind) => match kind {
- ItemKind::Const(_, a, b) => {
+ ItemKind::Const(box ConstItem { ty, expr, .. }) => {
self.sess.emit_err(errors::ExternItemCannotBeConst {
ident_span: ident.span,
const_span: span.with_hi(ident.span.lo()),
});
- ForeignItemKind::Static(a, Mutability::Not, b)
+ ForeignItemKind::Static(ty, Mutability::Not, expr)
}
_ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
},
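
For reference, a minimal sketch of building the boxed AST nodes used above; the field names come from this patch, while the import paths and helper names are assumptions.

    use rustc_ast::ptr::P;
    use rustc_ast::ast::{ConstItem, Defaultness, Expr, ItemKind, Mutability, StaticItem, Ty};

    fn mk_static_item(ty: P<Ty>, expr: Option<P<Expr>>) -> ItemKind {
        ItemKind::Static(Box::new(StaticItem { ty, mutability: Mutability::Not, expr }))
    }

    fn mk_const_item(ty: P<Ty>, expr: Option<P<Expr>>) -> ItemKind {
        ItemKind::Const(Box::new(ConstItem { defaultness: Defaultness::Final, ty, expr }))
    }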
@@ -1184,7 +1186,7 @@ impl<'a> Parser<'a> {
defaultness: Defaultness,
) -> PResult<'a, ItemInfo> {
let impl_span = self.token.span;
- let mut err = self.expected_ident_found();
+ let mut err = self.expected_ident_found_err();
// Only try to recover if this is implementing a trait for a type
let mut impl_info = match self.parse_item_impl(attrs, defaultness) {
@@ -1747,7 +1749,7 @@ impl<'a> Parser<'a> {
/// Parses a field identifier. Specialized version of `parse_ident_common`
/// for better diagnostics and suggestions.
fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
- let (ident, is_raw) = self.ident_or_err()?;
+ let (ident, is_raw) = self.ident_or_err(true)?;
if !is_raw && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
let err = if self.check_fn_front_matter(false, Case::Sensitive) {
@@ -1779,7 +1781,7 @@ impl<'a> Parser<'a> {
Err(err) => {
err.cancel();
self.restore_snapshot(snapshot);
- self.expected_ident_found()
+ self.expected_ident_found_err()
}
}
} else if self.eat_keyword(kw::Struct) {
@@ -1795,11 +1797,11 @@ impl<'a> Parser<'a> {
Err(err) => {
err.cancel();
self.restore_snapshot(snapshot);
- self.expected_ident_found()
+ self.expected_ident_found_err()
}
}
} else {
- let mut err = self.expected_ident_found();
+ let mut err = self.expected_ident_found_err();
if self.eat_keyword_noexpect(kw::Let)
&& let removal_span = self.prev_token.span.until(self.token.span)
&& let Ok(ident) = self.parse_ident_common(false)
@@ -1960,21 +1962,12 @@ impl<'a> Parser<'a> {
// FIXME: This will make us not emit the help even for declarative
// macros within the same crate (that we can fix), which is sad.
if !span.from_expansion() {
- if self.unclosed_delims.is_empty() {
- let DelimSpan { open, close } = args.dspan;
- err.multipart_suggestion(
- "change the delimiters to curly braces",
- vec![(open, "{".to_string()), (close, '}'.to_string())],
- Applicability::MaybeIncorrect,
- );
- } else {
- err.span_suggestion(
- span,
- "change the delimiters to curly braces",
- " { /* items */ }",
- Applicability::HasPlaceholders,
- );
- }
+ let DelimSpan { open, close } = args.dspan;
+ err.multipart_suggestion(
+ "change the delimiters to curly braces",
+ vec![(open, "{".to_string()), (close, '}'.to_string())],
+ Applicability::MaybeIncorrect,
+ );
err.span_suggestion(
span.shrink_to_hi(),
"add a semicolon",
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index da82e4724..aa57b8047 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -29,6 +29,7 @@ use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutab
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Ordering;
use rustc_errors::PResult;
use rustc_errors::{
Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
@@ -42,8 +43,7 @@ use thin_vec::ThinVec;
use tracing::debug;
use crate::errors::{
- DocCommentDoesNotDocumentAnything, IncorrectVisibilityRestriction, MismatchedClosingDelimiter,
- NonStringAbiLiteral,
+ IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
};
bitflags::bitflags! {
@@ -146,10 +146,7 @@ pub struct Parser<'a> {
/// See the comments in the `parse_path_segment` function for more details.
unmatched_angle_bracket_count: u32,
max_angle_bracket_count: u32,
- /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
- /// it gets removed from here. Every entry left at the end gets emitted as an independent
- /// error.
- pub(super) unclosed_delims: Vec<UnmatchedDelim>,
+
last_unexpected_token_span: Option<Span>,
/// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
/// looked like it could have been a mistyped path or literal `Option:Some(42)`).
@@ -168,7 +165,7 @@ pub struct Parser<'a> {
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(Parser<'_>, 312);
+rustc_data_structures::static_assert_size!(Parser<'_>, 288);
/// Stores span information about a closure.
#[derive(Clone)]
@@ -215,12 +212,6 @@ struct CaptureState {
inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
}
-impl<'a> Drop for Parser<'a> {
- fn drop(&mut self) {
- emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
- }
-}
-
/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
/// use this type to emit them as a linear sequence. But a linear sequence is
@@ -478,7 +469,6 @@ impl<'a> Parser<'a> {
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
- unclosed_delims: Vec::new(),
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
@@ -562,21 +552,11 @@ impl<'a> Parser<'a> {
self.parse_ident_common(true)
}
- fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> {
- self.token.ident().ok_or_else(|| match self.prev_token.kind {
- TokenKind::DocComment(..) => DocCommentDoesNotDocumentAnything {
- span: self.prev_token.span,
- missing_comma: None,
- }
- .into_diagnostic(&self.sess.span_diagnostic),
- _ => self.expected_ident_found(),
- })
- }
-
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
- let (ident, is_raw) = self.ident_or_err()?;
+ let (ident, is_raw) = self.ident_or_err(recover)?;
+
if !is_raw && ident.is_reserved() {
- let mut err = self.expected_ident_found();
+ let mut err = self.expected_ident_found_err();
if recover {
err.emit();
} else {
@@ -587,6 +567,21 @@ impl<'a> Parser<'a> {
Ok(ident)
}
+ fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+ let result = self.token.ident().ok_or_else(|| self.expected_ident_found(recover));
+
+ let (ident, is_raw) = match result {
+ Ok(ident) => ident,
+ Err(err) => match err {
+ // we recovered!
+ Ok(ident) => ident,
+ Err(err) => return Err(err),
+ },
+ };
+
+ Ok((ident, is_raw))
+ }
+
/// Checks if the next token is `tok`, and returns `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
@@ -859,7 +854,6 @@ impl<'a> Parser<'a> {
let mut recovered = false;
let mut trailing = false;
let mut v = ThinVec::new();
- let unclosed_delims = !self.unclosed_delims.is_empty();
while !self.expect_any_with_type(kets, expect) {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
@@ -901,7 +895,7 @@ impl<'a> Parser<'a> {
_ => {
// Attempt to keep parsing if it was a similar separator.
if let Some(tokens) = t.similar_tokens() {
- if tokens.contains(&self.token.kind) && !unclosed_delims {
+ if tokens.contains(&self.token.kind) {
self.bump();
}
}
@@ -1207,9 +1201,13 @@ impl<'a> Parser<'a> {
self.parse_constness_(case, false)
}
- /// Parses constness for closures
- fn parse_closure_constness(&mut self, case: Case) -> Const {
- self.parse_constness_(case, true)
+ /// Parses constness for closures (case sensitive, feature-gated)
+ fn parse_closure_constness(&mut self) -> Const {
+ let constness = self.parse_constness_(Case::Sensitive, true);
+ if let Const::Yes(span) = constness {
+ self.sess.gated_spans.gate(sym::const_closures, span);
+ }
+ constness
}
fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
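
Illustrative input for the new gating (error wording approximated):

    // without `#![feature(const_closures)]`:
    let add = const |a: i32, b: i32| a + b;
    // error[E0658]: const closures are experimental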
@@ -1543,8 +1541,10 @@ pub(crate) fn make_unclosed_delims_error(
}
pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedDelim>, sess: &ParseSess) {
- *sess.reached_eof.borrow_mut() |=
- unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
+ let _ = sess.reached_eof.fetch_or(
+ unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none()),
+ Ordering::Relaxed,
+ );
for unmatched in unclosed_delims.drain(..) {
if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
e.emit();
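
A self-contained sketch of the synchronization change, assuming `reached_eof` is now an atomic flag rather than a `Lock<bool>` (the `Ordering` import added above points that way):

    use std::sync::atomic::{AtomicBool, Ordering};

    fn mark_reached_eof(reached_eof: &AtomicBool, unclosed_at_eof: bool) {
        // `fetch_or` replaces the old `*flag.borrow_mut() |= ...`; Relaxed ordering
        // suffices because the flag is only ever raised, never cleared or ordered
        // against other data.
        let _ = reached_eof.fetch_or(unclosed_at_eof, Ordering::Relaxed);
    }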
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 8e920f1c4..2246002f5 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -391,7 +391,13 @@ impl<'a> Parser<'a> {
} else {
PatKind::Lit(const_expr)
}
- } else if self.can_be_ident_pat() {
+ // Don't eagerly error on semantically invalid tokens when matching
+ // declarative macros, as the input to those doesn't have to be
+ // semantically valid. For attribute/derive proc macros this is not the
+ // case, so doing the recovery for them is fine.
+ } else if self.can_be_ident_pat()
+ || (self.is_lit_bad_ident().is_some() && self.may_recover())
+ {
// Parse `ident @ pat`
// This can give false positives and parse nullary enums,
// they are dealt with later in resolve.
@@ -590,7 +596,7 @@ impl<'a> Parser<'a> {
// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtPat(_) = **nt {
- self.expected_ident_found().emit();
+ self.expected_ident_found_err().emit();
}
}
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index b50d2984a..c25c23d84 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -1,6 +1,6 @@
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
-use crate::maybe_whole;
+use crate::{errors, maybe_whole};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::{
@@ -290,6 +290,32 @@ impl<'a> Parser<'a> {
})?;
let span = lo.to(self.prev_token.span);
AngleBracketedArgs { args, span }.into()
+ } else if self.may_recover()
+ && self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+ // FIXME(return_type_notation): Could also recover `...` here.
+ && self.look_ahead(1, |tok| tok.kind == token::DotDot)
+ {
+ self.bump();
+ self.sess
+ .emit_err(errors::BadReturnTypeNotationDotDot { span: self.token.span });
+ self.bump();
+ self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+ let span = lo.to(self.prev_token.span);
+
+ if self.eat_noexpect(&token::RArrow) {
+ let lo = self.prev_token.span;
+ let ty = self.parse_ty()?;
+ self.sess
+ .emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
+ }
+
+ ParenthesizedArgs {
+ span,
+ inputs: ThinVec::new(),
+ inputs_span: span,
+ output: ast::FnRetTy::Default(self.prev_token.span.shrink_to_hi()),
+ }
+ .into()
} else {
// `(T, U) -> R`
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
@@ -300,7 +326,7 @@ impl<'a> Parser<'a> {
ParenthesizedArgs { span, inputs, inputs_span, output }.into()
};
- PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
+ PathSegment { ident, args: Some(args), id: ast::DUMMY_NODE_ID }
} else {
// Generic arguments are not found.
PathSegment::from_ident(ident)
@@ -547,10 +573,16 @@ impl<'a> Parser<'a> {
};
let span = lo.to(self.prev_token.span);
-
// Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
if let AssocConstraintKind::Bound { .. } = kind {
- self.sess.gated_spans.gate(sym::associated_type_bounds, span);
+ if let Some(ast::GenericArgs::Parenthesized(args)) = &gen_args
+ && args.inputs.is_empty()
+ && matches!(args.output, ast::FnRetTy::Default(..))
+ {
+ self.sess.gated_spans.gate(sym::return_type_notation, span);
+ } else {
+ self.sess.gated_spans.gate(sym::associated_type_bounds, span);
+ }
}
let constraint =
AssocConstraint { id: ast::DUMMY_NODE_ID, ident, gen_args, kind, span };
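
Hypothetical inputs exercising the recovery and the adjusted gating above (error wording approximated):

    // the `..` is recovered away and BadReturnTypeNotationDotDot is emitted;
    // a trailing `-> Ty` additionally triggers BadReturnTypeNotationOutput:
    fn f<T: Trait<method(..): Send>>() {}
    fn g<T: Trait<method(..) -> i32: Send>>() {}

    // empty parentheses with no output now gate `return_type_notation`,
    // while ordinary associated type bounds keep gating `associated_type_bounds`:
    fn h<T: Trait<method(): Send>>() {}
    fn i<I: Iterator<Item: Ord>>() {}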
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 92a22ffc2..fbe5b88c4 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -273,7 +273,6 @@ impl<'a> Parser<'a> {
self.bump();
}
- self.report_invalid_identifier_error()?;
let (pat, colon) =
self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
@@ -366,17 +365,6 @@ impl<'a> Parser<'a> {
Ok(P(ast::Local { ty, pat, kind, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None }))
}
- /// report error for `let 1x = 123`
- pub fn report_invalid_identifier_error(&mut self) -> PResult<'a, ()> {
- if let token::Literal(lit) = self.token.uninterpolate().kind &&
- rustc_ast::MetaItemLit::from_token(&self.token).is_none() &&
- (lit.kind == token::LitKind::Integer || lit.kind == token::LitKind::Float) &&
- self.look_ahead(1, |t| matches!(t.kind, token::Eq) || matches!(t.kind, token::Colon ) ) {
- return Err(self.sess.create_err(errors::InvalidIdentiferStartsWithNumber { span: self.token.span }));
- }
- Ok(())
- }
-
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
if op.node.lazy() {
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 6fe4da71f..400c8dbe9 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -624,10 +624,12 @@ impl<'a> Parser<'a> {
///
/// Note that this does *not* parse bare trait objects.
fn parse_dyn_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
+ let lo = self.token.span;
self.bump(); // `dyn`
// parse dyn* types
let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
+ self.sess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
TraitObjectSyntax::DynStar
} else {
TraitObjectSyntax::Dyn
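
Illustrative input for the span now recorded for feature gating (error wording approximated):

    // without `#![feature(dyn_star)]`:
    fn f(x: dyn* std::fmt::Debug) {}
    // error[E0658]: `dyn*` trait objects are experimental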
@@ -1057,8 +1059,11 @@ impl<'a> Parser<'a> {
output,
}
.into();
- *fn_path_segment =
- ast::PathSegment { ident: fn_path_segment.ident, args, id: ast::DUMMY_NODE_ID };
+ *fn_path_segment = ast::PathSegment {
+ ident: fn_path_segment.ident,
+ args: Some(args),
+ id: ast::DUMMY_NODE_ID,
+ };
// Convert parsed `<'a>` in `Fn<'a>` into `for<'a>`.
let mut generic_params = lifetimes