Diffstat (limited to 'compiler/rustc_parse/src/parser')
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs          |  37
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs  |  14
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs   |  85
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs          | 435
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs          | 171
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs           | 142
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs   |  16
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs           |  18
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs          |  17
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs          |  86
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs            |  69
11 files changed, 655 insertions(+), 435 deletions(-)
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 9e4565694..c7d239b64 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -55,7 +55,7 @@ impl<'a> Parser<'a> {
let span = self.token.span;
let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
span,
- fluent::parser_inner_doc_comment_not_permitted,
+ fluent::parse_inner_doc_comment_not_permitted,
error_code!(E0753),
);
if let Some(replacement_span) = self.annotate_following_item_if_applicable(
@@ -200,7 +200,7 @@ impl<'a> Parser<'a> {
Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
let mut diag = self.struct_span_err(
attr_sp,
- fluent::parser_inner_attr_not_permitted_after_outer_doc_comment,
+ fluent::parse_inner_attr_not_permitted_after_outer_doc_comment,
);
diag.span_label(attr_sp, fluent::label_attr)
.span_label(prev_doc_comment_span, fluent::label_prev_doc_comment);
@@ -209,18 +209,18 @@ impl<'a> Parser<'a> {
Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => {
let mut diag = self.struct_span_err(
attr_sp,
- fluent::parser_inner_attr_not_permitted_after_outer_attr,
+ fluent::parse_inner_attr_not_permitted_after_outer_attr,
);
diag.span_label(attr_sp, fluent::label_attr)
.span_label(prev_outer_attr_sp, fluent::label_prev_attr);
diag
}
Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
- self.struct_span_err(attr_sp, fluent::parser_inner_attr_not_permitted)
+ self.struct_span_err(attr_sp, fluent::parse_inner_attr_not_permitted)
}
};
- diag.note(fluent::parser_inner_attr_explanation);
+ diag.note(fluent::parse_inner_attr_explanation);
if self
.annotate_following_item_if_applicable(
&mut diag,
@@ -229,7 +229,7 @@ impl<'a> Parser<'a> {
)
.is_some()
{
- diag.note(fluent::parser_outer_attr_explanation);
+ diag.note(fluent::parse_outer_attr_explanation);
};
diag.emit();
}
@@ -245,9 +245,9 @@ impl<'a> Parser<'a> {
/// PATH `=` UNSUFFIXED_LIT
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
- let item = match self.token.kind {
- token::Interpolated(ref nt) => match **nt {
- Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
+ let item = match &self.token.kind {
+ token::Interpolated(nt) => match &**nt {
+ Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
_ => None,
},
_ => None,
@@ -315,9 +315,10 @@ impl<'a> Parser<'a> {
Ok(attrs)
}
- pub(crate) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
- let lit = self.parse_lit()?;
- debug!("checking if {:?} is unusuffixed", lit);
+ // Note: must be unsuffixed.
+ pub(crate) fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'a, ast::MetaItemLit> {
+ let lit = self.parse_meta_item_lit()?;
+ debug!("checking if {:?} is unsuffixed", lit);
if !lit.kind.is_unsuffixed() {
self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
@@ -364,9 +365,9 @@ impl<'a> Parser<'a> {
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
/// ```
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
- let nt_meta = match self.token.kind {
- token::Interpolated(ref nt) => match **nt {
- token::NtMeta(ref e) => Some(e.clone()),
+ let nt_meta = match &self.token.kind {
+ token::Interpolated(nt) => match &**nt {
+ token::NtMeta(e) => Some(e.clone()),
_ => None,
},
_ => None,
@@ -391,7 +392,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
- ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+ ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
@@ -403,8 +404,8 @@ impl<'a> Parser<'a> {
/// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
- match self.parse_unsuffixed_lit() {
- Ok(lit) => return Ok(ast::NestedMetaItem::Literal(lit)),
+ match self.parse_unsuffixed_meta_item_lit() {
+ Ok(lit) => return Ok(ast::NestedMetaItem::Lit(lit)),
Err(err) => err.cancel(),
}
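
[Illustration, not part of the patch: the attr.rs hunks above rename parse_unsuffixed_lit to parse_unsuffixed_meta_item_lit (returning ast::MetaItemLit) and NestedMetaItem::Literal to NestedMetaItem::Lit. Below is a minimal standalone sketch of the "only unsuffixed literals in attributes" check that this helper enforces; Lit, check_unsuffixed and main are invented stand-ins, not rustc's types.]

// Simplified model: a literal with an optional suffix, e.g. `usize` in `1usize`.
struct Lit {
    symbol: String,
    suffix: Option<String>,
}

// Attribute position only accepts unsuffixed literals.
fn check_unsuffixed(lit: &Lit) -> Result<(), String> {
    match &lit.suffix {
        None => Ok(()),
        Some(suffix) => Err(format!(
            "suffixed literal `{}{}` is not allowed in attributes",
            lit.symbol, suffix
        )),
    }
}

fn main() {
    assert!(check_unsuffixed(&Lit { symbol: "1".into(), suffix: None }).is_ok());
    assert!(check_unsuffixed(&Lit { symbol: "1".into(), suffix: Some("usize".into()) }).is_err());
}
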
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 1b16ecb5e..a084a7010 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -5,7 +5,8 @@ use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spac
use rustc_ast::{self as ast};
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
use rustc_errors::PResult;
-use rustc_span::{sym, Span};
+use rustc_session::parse::ParseSess;
+use rustc_span::{sym, Span, DUMMY_SP};
use std::convert::TryInto;
use std::ops::Range;
@@ -39,12 +40,17 @@ impl AttrWrapper {
pub fn empty() -> AttrWrapper {
AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
}
- // FIXME: Delay span bug here?
- pub(crate) fn take_for_recovery(self) -> AttrVec {
+
+ pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec {
+ sess.span_diagnostic.delay_span_bug(
+ self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
+ "AttrVec is taken for recovery but no error is produced",
+ );
+
self.attrs
}
- // Prepend `self.attrs` to `attrs`.
+ /// Prepend `self.attrs` to `attrs`.
// FIXME: require passing an NT to prevent misuse of this method
pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
let mut self_attrs = self.attrs;
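
[Illustration, not part of the patch: take_for_recovery now takes the ParseSess so it can file a delayed span bug, i.e. an internal assertion that only turns into a hard failure if no real error is emitted on that path. Below is a standalone analog of that pattern; the Handler type is a hypothetical stand-in for rustc's diagnostic handler.]

#[derive(Default)]
struct Handler {
    errors: usize,
    delayed_bugs: Vec<String>,
}

impl Handler {
    fn emit_err(&mut self, msg: &str) {
        self.errors += 1;
        eprintln!("error: {msg}");
    }

    // Record an internal assertion that something on this path must also
    // produce a real error; it is only surfaced later if nothing did.
    fn delay_bug(&mut self, msg: &str) {
        self.delayed_bugs.push(msg.to_string());
    }

    fn flush(&self) {
        if self.errors == 0 && !self.delayed_bugs.is_empty() {
            panic!("internal error: {:?}", self.delayed_bugs);
        }
    }
}

fn main() {
    let mut handler = Handler::default();
    handler.delay_bug("AttrVec is taken for recovery but no error is produced");
    handler.emit_err("parse error that justifies the recovery");
    handler.flush(); // ok: a real error was emitted, so the delayed bug is dropped
}
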
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 309717350..c316a4dd6 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -18,6 +18,7 @@ use crate::errors::{
};
use crate::lexer::UnmatchedBrace;
+use crate::parser;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
@@ -37,11 +38,10 @@ use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
-use std::ops::{Deref, DerefMut};
-
use std::mem::take;
-
-use crate::parser;
+use std::ops::{Deref, DerefMut};
+use thin_vec::{thin_vec, ThinVec};
+use tracing::{debug, trace};
/// Creates a placeholder argument.
pub(super) fn dummy_arg(ident: Ident) -> Param {
@@ -65,7 +65,7 @@ pub(super) fn dummy_arg(ident: Ident) -> Param {
pub(super) trait RecoverQPath: Sized + 'static {
const PATH_STYLE: PathStyle = PathStyle::Expr;
fn to_ty(&self) -> Option<P<Ty>>;
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self;
}
impl RecoverQPath for Ty {
@@ -73,7 +73,7 @@ impl RecoverQPath for Ty {
fn to_ty(&self) -> Option<P<Ty>> {
Some(P(self.clone()))
}
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
Self {
span: path.span,
kind: TyKind::Path(qself, path),
@@ -87,7 +87,7 @@ impl RecoverQPath for Pat {
fn to_ty(&self) -> Option<P<Ty>> {
self.to_ty()
}
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
Self {
span: path.span,
kind: PatKind::Path(qself, path),
@@ -101,7 +101,7 @@ impl RecoverQPath for Expr {
fn to_ty(&self) -> Option<P<Ty>> {
self.to_ty()
}
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
Self {
span: path.span,
kind: ExprKind::Path(qself, path),
@@ -224,9 +224,9 @@ impl MultiSugg {
}
}
-// SnapshotParser is used to create a snapshot of the parser
-// without causing duplicate errors being emitted when the `Parser`
-// is dropped.
+/// SnapshotParser is used to create a snapshot of the parser
+/// without causing duplicate errors being emitted when the `Parser`
+/// is dropped.
pub struct SnapshotParser<'a> {
parser: Parser<'a>,
unclosed_delims: Vec<UnmatchedBrace>,
@@ -638,8 +638,11 @@ impl<'a> Parser<'a> {
// field: value,
// }
let mut snapshot = self.create_snapshot_for_diagnostic();
- let path =
- Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
+ let path = Path {
+ segments: ThinVec::new(),
+ span: self.prev_token.span.shrink_to_lo(),
+ tokens: None,
+ };
let struct_expr = snapshot.parse_struct_expr(None, path, false);
let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
return Some(match (struct_expr, block_tail) {
@@ -769,6 +772,10 @@ impl<'a> Parser<'a> {
segment: &PathSegment,
end: &[&TokenKind],
) -> bool {
+ if !self.may_recover() {
+ return false;
+ }
+
// This function is intended to be invoked after parsing a path segment where there are two
// cases:
//
@@ -863,6 +870,10 @@ impl<'a> Parser<'a> {
/// Check if a method call with an intended turbofish has been written without surrounding
/// angle brackets.
pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
+ if !self.may_recover() {
+ return;
+ }
+
if token::ModSep == self.token.kind && segment.args.is_none() {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
@@ -926,7 +937,7 @@ impl<'a> Parser<'a> {
if self.eat(&token::Gt) {
e.span_suggestion_verbose(
binop.span.shrink_to_lo(),
- fluent::parser_sugg_turbofish_syntax,
+ fluent::parse_sugg_turbofish_syntax,
"::",
Applicability::MaybeIncorrect,
)
@@ -962,7 +973,7 @@ impl<'a> Parser<'a> {
inner_op: &Expr,
outer_op: &Spanned<AssocOp>,
) -> bool /* advanced the cursor */ {
- if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
+ if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
if let ExprKind::Field(_, ident) = l1.kind
&& ident.as_str().parse::<i32>().is_err()
&& !matches!(r1.kind, ExprKind::Lit(_))
@@ -1068,8 +1079,8 @@ impl<'a> Parser<'a> {
let mk_err_expr = |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err)));
- match inner_op.kind {
- ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
+ match &inner_op.kind {
+ ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
let mut err = ComparisonOperatorsCannotBeChained {
span: vec![op.span, self.prev_token.span],
suggest_turbofish: None,
@@ -1226,8 +1237,8 @@ impl<'a> Parser<'a> {
let bounds = self.parse_generic_bounds(None)?;
let sum_span = ty.span.to(self.prev_token.span);
- let sub = match ty.kind {
- TyKind::Rptr(ref lifetime, ref mut_ty) => {
+ let sub = match &ty.kind {
+ TyKind::Rptr(lifetime, mut_ty) => {
let sum_with_parens = pprust::to_string(|s| {
s.s.word("&");
s.print_opt_lifetime(lifetime);
@@ -1396,6 +1407,10 @@ impl<'a> Parser<'a> {
&mut self,
base: P<T>,
) -> PResult<'a, P<T>> {
+ if !self.may_recover() {
+ return Ok(base);
+ }
+
// Do not add `::` to expected tokens.
if self.token == token::ModSep {
if let Some(ty) = base.to_ty() {
@@ -1414,7 +1429,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<T>> {
self.expect(&token::ModSep)?;
- let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
+ let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
path.span = ty_span.to(self.prev_token.span);
@@ -1425,7 +1440,7 @@ impl<'a> Parser<'a> {
});
let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
- Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
+ Ok(P(T::recovered(Some(P(QSelf { ty, path_span, position: 0 })), path)))
}
pub fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
@@ -1641,15 +1656,29 @@ impl<'a> Parser<'a> {
(token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
self.bump();
+ let sm = self.sess.source_map();
+ let left = begin_par_sp;
+ let right = self.prev_token.span;
+ let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left) &&
+ !snip.ends_with(' ') {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
+
+ let right_snippet = if let Ok(snip) = sm.span_to_next_source(right) &&
+ !snip.starts_with(' ') {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
+
self.sess.emit_err(ParenthesesInForHead {
- span: vec![begin_par_sp, self.prev_token.span],
+ span: vec![left, right],
// With e.g. `for (x) in y)` this would replace `(x) in y)`
// with `x) in y)` which is syntactically invalid.
// However, this is prevented before we get here.
- sugg: ParenthesesInForHeadSugg {
- left: begin_par_sp,
- right: self.prev_token.span,
- },
+ sugg: ParenthesesInForHeadSugg { left, right, left_snippet, right_snippet },
});
// Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
@@ -2408,7 +2437,7 @@ impl<'a> Parser<'a> {
None,
Path {
span: new_span,
- segments: vec![
+ segments: thin_vec![
PathSegment::from_ident(*old_ident),
PathSegment::from_ident(*ident),
],
@@ -2536,7 +2565,7 @@ impl<'a> Parser<'a> {
if let [a, b] = segments {
let (a_span, b_span) = (a.span(), b.span());
let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
- if self.span_to_snippet(between_span).as_ref().map(|a| &a[..]) == Ok(":: ") {
+ if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
return Err(DoubleColonInBound {
span: path.span.shrink_to_hi(),
between: between_span,
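
[Illustration, not part of the patch: several recovery helpers above gain an early `if !self.may_recover() { return ...; }` guard so that speculative, diagnostics-only work is skipped entirely when recovery is disabled. Below is a standalone sketch of that gating; the Parser type and its field are simplified stand-ins, not rustc's.]

struct Parser {
    recovery_enabled: bool,
}

impl Parser {
    fn may_recover(&self) -> bool {
        self.recovery_enabled
    }

    // Diagnostics-only check: bail out before doing any speculative lookahead
    // or snapshotting when recovery is turned off.
    fn check_trailing_angle_brackets(&mut self) -> bool {
        if !self.may_recover() {
            return false;
        }
        // ... speculative lookahead and suggestion building would go here ...
        true
    }
}

fn main() {
    let mut strict = Parser { recovery_enabled: false };
    assert!(!strict.check_trailing_angle_brackets());

    let mut lenient = Parser { recovery_enabled: true };
    assert!(lenient.check_trailing_angle_brackets());
}
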
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index a781748ef..f6a6ed379 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -7,46 +7,42 @@ use super::{
};
use crate::errors::{
ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
- BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
- ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
- DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedExpressionFoundLet,
+ BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct, ComparisonInterpretedAsGeneric,
+ ComparisonOrShiftInterpretedAsGenericSugg, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
+ ExpectedElseBlock, ExpectedEqForLetExpr, ExpectedExpressionFoundLet,
FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
- HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
- IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
+ IfExpressionMissingCondition, IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub,
InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
- InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
- InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
- InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
- InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
+ InvalidInterpolatedExpression, InvalidLiteralSuffixOnTupleIndex, InvalidLogicalOperator,
+ InvalidLogicalOperatorSub, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
- MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray, NoFieldsForFnCall,
- NotAsNegationOperator, NotAsNegationOperatorSub, OctalFloatLiteralNotSupported,
+ MissingDotDot, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
+ NoFieldsForFnCall, NotAsNegationOperator, NotAsNegationOperatorSub,
OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
- StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
- UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
+ StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedIfWithIf,
+ UnexpectedTokenAfterLabel, UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
};
use crate::maybe_recover_from_interpolated_ty_qpath;
-
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
+use rustc_ast::util::case::Case;
use rustc_ast::util::classify;
-use rustc_ast::util::literal::LitError;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
use rustc_ast::visit::Visitor;
-use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
+use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
-use rustc_ast::{ClosureBinder, StmtKind};
+use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{
Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
StashKey,
};
-use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::source_map::{self, Span, Spanned};
@@ -132,7 +128,7 @@ impl<'a> Parser<'a> {
Ok(expr) => Ok(expr),
Err(mut err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, false))
- if self.look_ahead(1, |t| t == &token::Comma) =>
+ if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
{
// Special-case handling of `foo(_, _, _)`
err.emit();
@@ -394,20 +390,11 @@ impl<'a> Parser<'a> {
// want to keep their span info to improve diagnostics in these cases in a later stage.
(true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
(true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
- (true, Some(AssocOp::Add)) // `{ 42 } + 42
- // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
- // `if x { a } else { b } && if y { c } else { d }`
- if !self.look_ahead(1, |t| t.is_used_keyword()) => {
- // These cases are ambiguous and can't be identified in the parser alone.
- let sp = self.sess.source_map().start_point(self.token.span);
- self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
- false
- }
- (true, Some(AssocOp::LAnd)) |
- (true, Some(AssocOp::LOr)) |
- (true, Some(AssocOp::BitOr)) => {
- // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
- // above due to #74233.
+ (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus)
+ (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
+ (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure)
+ (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42`
+ => {
// These cases are ambiguous and can't be identified in the parser alone.
//
// Bitwise AND is left out because guessing intent is hard. We can make
@@ -418,7 +405,7 @@ impl<'a> Parser<'a> {
self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
false
}
- (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false,
+ (true, Some(op)) if !op.can_continue_expr_unambiguously() => false,
(true, Some(_)) => {
self.error_found_expr_would_be_stmt(lhs);
true
@@ -456,7 +443,7 @@ impl<'a> Parser<'a> {
return None;
}
(Some(op), _) => (op, self.token.span),
- (None, Some((Ident { name: sym::and, span }, false))) => {
+ (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
self.sess.emit_err(InvalidLogicalOperator {
span: self.token.span,
incorrect: "and".into(),
@@ -464,7 +451,7 @@ impl<'a> Parser<'a> {
});
(AssocOp::LAnd, span)
}
- (None, Some((Ident { name: sym::or, span }, false))) => {
+ (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
self.sess.emit_err(InvalidLogicalOperator {
span: self.token.span,
incorrect: "or".into(),
@@ -615,7 +602,7 @@ impl<'a> Parser<'a> {
token::Ident(..) if this.token.is_keyword(kw::Box) => {
make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
}
- token::Ident(..) if this.is_mistaken_not_ident_negation() => {
+ token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
}
_ => return this.parse_dot_or_call_expr(Some(attrs)),
@@ -718,6 +705,10 @@ impl<'a> Parser<'a> {
let cast_expr = match self.parse_as_cast_ty() {
Ok(rhs) => mk_expr(self, lhs, rhs),
Err(type_err) => {
+ if !self.may_recover() {
+ return Err(type_err);
+ }
+
// Rewind to before attempting to parse the type with generics, to recover
// from situations like `x as usize < y` in which we first tried to parse
// `usize < y` as a type with generic arguments.
@@ -829,23 +820,18 @@ impl<'a> Parser<'a> {
("cast", None)
};
- // Save the memory location of expr before parsing any following postfix operators.
- // This will be compared with the memory location of the output expression.
- // If they different we can assume we parsed another expression because the existing expression is not reallocated.
- let addr_before = &*cast_expr as *const _ as usize;
let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
- let changed = addr_before != &*with_postfix as *const _ as usize;
// Check if an illegal postfix operator has been added after the cast.
- // If the resulting expression is not a cast, or has a different memory location, it is an illegal postfix operator.
- if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) || changed {
+ // If the resulting expression is not a cast, it is an illegal postfix operator.
+ if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) {
let msg = format!(
"{cast_kind} cannot be followed by {}",
match with_postfix.kind {
ExprKind::Index(_, _) => "indexing",
ExprKind::Try(_) => "`?`",
ExprKind::Field(_, _) => "a field access",
- ExprKind::MethodCall(_, _, _, _) => "a method call",
+ ExprKind::MethodCall(_) => "a method call",
ExprKind::Call(_, _) => "a function call",
ExprKind::Await(_) => "`.await`",
ExprKind::Err => return Ok(with_postfix),
@@ -1197,6 +1183,10 @@ impl<'a> Parser<'a> {
seq: &mut PResult<'a, P<Expr>>,
snapshot: Option<(SnapshotParser<'a>, ExprKind)>,
) -> Option<P<Expr>> {
+ if !self.may_recover() {
+ return None;
+ }
+
match (seq.as_mut(), snapshot) {
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
snapshot.bump(); // `(`
@@ -1263,24 +1253,32 @@ impl<'a> Parser<'a> {
}
let fn_span_lo = self.token.span;
- let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
- self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
- self.check_turbofish_missing_angle_brackets(&mut segment);
+ let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
+ self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]);
+ self.check_turbofish_missing_angle_brackets(&mut seg);
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Method call `expr.f()`
let args = self.parse_paren_expr_seq()?;
let fn_span = fn_span_lo.to(self.prev_token.span);
let span = lo.to(self.prev_token.span);
- Ok(self.mk_expr(span, ExprKind::MethodCall(segment, self_arg, args, fn_span)))
+ Ok(self.mk_expr(
+ span,
+ ExprKind::MethodCall(Box::new(ast::MethodCall {
+ seg,
+ receiver: self_arg,
+ args,
+ span: fn_span,
+ })),
+ ))
} else {
// Field access `expr.f`
- if let Some(args) = segment.args {
+ if let Some(args) = seg.args {
self.sess.emit_err(FieldExpressionWithGeneric(args.span()));
}
let span = lo.to(self.prev_token.span);
- Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident)))
+ Ok(self.mk_expr(span, ExprKind::Field(self_arg, seg.ident)))
}
}
@@ -1360,7 +1358,7 @@ impl<'a> Parser<'a> {
)
} else if self.check_inline_const(0) {
self.parse_const_block(lo.to(self.token.span), false)
- } else if self.is_do_catch_block() {
+ } else if self.may_recover() && self.is_do_catch_block() {
self.recover_do_catch()
} else if self.is_try_block() {
self.expect_keyword(kw::Try)?;
@@ -1411,9 +1409,9 @@ impl<'a> Parser<'a> {
fn parse_lit_expr(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
- match self.parse_opt_lit() {
- Some(literal) => {
- let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal));
+ match self.parse_opt_token_lit() {
+ Some((token_lit, _)) => {
+ let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
self.maybe_recover_from_bad_qpath(expr)
}
None => self.try_macro_suggestion(),
@@ -1494,12 +1492,12 @@ impl<'a> Parser<'a> {
let lo = path.span;
let mac = P(MacCall {
path,
- args: self.parse_mac_args()?,
+ args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
});
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(Delimiter::Brace)) &&
- let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path) {
+ let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) {
if qself.is_some() {
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
}
@@ -1532,6 +1530,7 @@ impl<'a> Parser<'a> {
{
self.parse_block_expr(label, lo, BlockCheckMode::Default)
} else if !ate_colon
+ && self.may_recover()
&& (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
|| self.token.is_op())
{
@@ -1543,7 +1542,7 @@ impl<'a> Parser<'a> {
})
});
consume_colon = false;
- Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
+ Ok(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
} else if !ate_colon
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
{
@@ -1620,10 +1619,10 @@ impl<'a> Parser<'a> {
/// Emit an error when a char is parsed as a lifetime because of a missing quote
pub(super) fn recover_unclosed_char(
- &mut self,
+ &self,
lifetime: Ident,
- err: impl FnOnce(&mut Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
- ) -> ast::Lit {
+ err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
+ ) -> ast::MetaItemLit {
if let Some(mut diag) =
self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
{
@@ -1644,9 +1643,10 @@ impl<'a> Parser<'a> {
)
.emit();
}
- ast::Lit {
- token_lit: token::Lit::new(token::LitKind::Char, lifetime.name, None),
- kind: ast::LitKind::Char(lifetime.name.as_str().chars().next().unwrap_or('_')),
+ let name = lifetime.without_first_quote().name;
+ ast::MetaItemLit {
+ token_lit: token::Lit::new(token::LitKind::Char, name, None),
+ kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
span: lifetime.span,
}
}
@@ -1719,13 +1719,13 @@ impl<'a> Parser<'a> {
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
{
let expr = self.parse_expr_opt()?;
- if let Some(ref expr) = expr {
+ if let Some(expr) = &expr {
if label.is_some()
&& matches!(
expr.kind,
ExprKind::While(_, _, None)
| ExprKind::ForLoop(_, _, _, None)
- | ExprKind::Loop(_, None)
+ | ExprKind::Loop(_, None, _)
| ExprKind::Block(_, None)
)
{
@@ -1759,8 +1759,8 @@ impl<'a> Parser<'a> {
/// Returns a string literal if the next token is a string literal.
/// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
/// and returns `None` if the next token is not literal at all.
- pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
- match self.parse_opt_lit() {
+ pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
+ match self.parse_opt_meta_item_lit() {
Some(lit) => match lit.kind {
ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
style,
@@ -1775,41 +1775,47 @@ impl<'a> Parser<'a> {
}
}
- pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
- self.parse_opt_lit().ok_or(()).or_else(|()| {
- if let token::Interpolated(inner) = &self.token.kind {
- let expr = match inner.as_ref() {
- token::NtExpr(expr) => Some(expr),
- token::NtLiteral(expr) => Some(expr),
- _ => None,
- };
- if let Some(expr) = expr {
- if matches!(expr.kind, ExprKind::Err) {
- let mut err = InvalidInterpolatedExpression { span: self.token.span }
- .into_diagnostic(&self.sess.span_diagnostic);
- err.downgrade_to_delayed_bug();
- return Err(err);
- }
- }
- }
- let token = self.token.clone();
- let err = |self_: &mut Self| {
- let msg = format!("unexpected token: {}", super::token_descr(&token));
- self_.struct_span_err(token.span, &msg)
+ fn handle_missing_lit(&mut self) -> PResult<'a, MetaItemLit> {
+ if let token::Interpolated(inner) = &self.token.kind {
+ let expr = match inner.as_ref() {
+ token::NtExpr(expr) => Some(expr),
+ token::NtLiteral(expr) => Some(expr),
+ _ => None,
};
- // On an error path, eagerly consider a lifetime to be an unclosed character lit
- if self.token.is_lifetime() {
- let lt = self.expect_lifetime();
- Ok(self.recover_unclosed_char(lt.ident, err))
- } else {
- Err(err(self))
+ if let Some(expr) = expr {
+ if matches!(expr.kind, ExprKind::Err) {
+ let mut err = InvalidInterpolatedExpression { span: self.token.span }
+ .into_diagnostic(&self.sess.span_diagnostic);
+ err.downgrade_to_delayed_bug();
+ return Err(err);
+ }
}
- })
+ }
+ let token = self.token.clone();
+ let err = |self_: &Self| {
+ let msg = format!("unexpected token: {}", super::token_descr(&token));
+ self_.struct_span_err(token.span, &msg)
+ };
+ // On an error path, eagerly consider a lifetime to be an unclosed character lit
+ if self.token.is_lifetime() {
+ let lt = self.expect_lifetime();
+ Ok(self.recover_unclosed_char(lt.ident, err))
+ } else {
+ Err(err(self))
+ }
}
- /// Matches `lit = true | false | token_lit`.
- /// Returns `None` if the next token is not a literal.
- pub(super) fn parse_opt_lit(&mut self) -> Option<Lit> {
+ pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
+ self.parse_opt_token_lit()
+ .ok_or(())
+ .or_else(|()| self.handle_missing_lit().map(|lit| (lit.token_lit, lit.span)))
+ }
+
+ pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
+ self.parse_opt_meta_item_lit().ok_or(()).or_else(|()| self.handle_missing_lit())
+ }
+
+ fn recover_after_dot(&mut self) -> Option<Token> {
let mut recovered = None;
if self.token == token::Dot {
// Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
@@ -1835,100 +1841,53 @@ impl<'a> Parser<'a> {
}
}
- let token = recovered.as_ref().unwrap_or(&self.token);
- match Lit::from_token(token) {
- Ok(lit) => {
- self.bump();
- Some(lit)
- }
- Err(LitError::NotLiteral) => None,
- Err(err) => {
- let span = token.span;
- let token::Literal(lit) = token.kind else {
- unreachable!();
- };
- self.bump();
- self.report_lit_error(err, lit, span);
- // Pack possible quotes and prefixes from the original literal into
- // the error literal's symbol so they can be pretty-printed faithfully.
- let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
- let symbol = Symbol::intern(&suffixless_lit.to_string());
- let lit = token::Lit::new(token::Err, symbol, lit.suffix);
- Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
- }
- }
+ recovered
}
- fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
- // Checks if `s` looks like i32 or u1234 etc.
- fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
- s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
- }
-
- // Try to lowercase the prefix if it's a valid base prefix.
- fn fix_base_capitalisation(s: &str) -> Option<String> {
- if let Some(stripped) = s.strip_prefix('B') {
- Some(format!("0b{stripped}"))
- } else if let Some(stripped) = s.strip_prefix('O') {
- Some(format!("0o{stripped}"))
- } else if let Some(stripped) = s.strip_prefix('X') {
- Some(format!("0x{stripped}"))
- } else {
- None
- }
- }
+ /// Matches `lit = true | false | token_lit`.
+ /// Returns `None` if the next token is not a literal.
+ pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+ let recovered = self.recover_after_dot();
+ let token = recovered.as_ref().unwrap_or(&self.token);
+ let span = token.span;
+ token::Lit::from_token(token).map(|token_lit| {
+ self.bump();
+ (token_lit, span)
+ })
+ }
- let token::Lit { kind, suffix, .. } = lit;
- match err {
- // `NotLiteral` is not an error by itself, so we don't report
- // it and give the parser opportunity to try something else.
- LitError::NotLiteral => {}
- // `LexerError` *is* an error, but it was already reported
- // by lexer, so here we don't report it the second time.
- LitError::LexerError => {}
- LitError::InvalidSuffix => {
- if let Some(suffix) = suffix {
- self.sess.emit_err(InvalidLiteralSuffix {
- span,
- kind: format!("{}", kind.descr()),
- suffix,
- });
- }
- }
- LitError::InvalidIntSuffix => {
- let suf = suffix.expect("suffix error with no suffix");
- let suf = suf.as_str();
- if looks_like_width_suffix(&['i', 'u'], &suf) {
- // If it looks like a width, try to be helpful.
- self.sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
- } else if let Some(fixed) = fix_base_capitalisation(suf) {
- self.sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
- } else {
- self.sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
- }
- }
- LitError::InvalidFloatSuffix => {
- let suf = suffix.expect("suffix error with no suffix");
- let suf = suf.as_str();
- if looks_like_width_suffix(&['f'], suf) {
- // If it looks like a width, try to be helpful.
- self.sess
- .emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
- } else {
- self.sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
+ /// Matches `lit = true | false | token_lit`.
+ /// Returns `None` if the next token is not a literal.
+ pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
+ let recovered = self.recover_after_dot();
+ let token = recovered.as_ref().unwrap_or(&self.token);
+ match token::Lit::from_token(token) {
+ Some(token_lit) => {
+ match MetaItemLit::from_token_lit(token_lit, token.span) {
+ Ok(lit) => {
+ self.bump();
+ Some(lit)
+ }
+ Err(err) => {
+ let span = token.span;
+ let token::Literal(lit) = token.kind else {
+ unreachable!();
+ };
+ self.bump();
+ report_lit_error(&self.sess, err, lit, span);
+ // Pack possible quotes and prefixes from the original literal into
+ // the error literal's symbol so they can be pretty-printed faithfully.
+ let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+ let symbol = Symbol::intern(&suffixless_lit.to_string());
+ let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+ Some(
+ MetaItemLit::from_token_lit(lit, span)
+ .unwrap_or_else(|_| unreachable!()),
+ )
+ }
}
}
- LitError::NonDecimalFloat(base) => {
- match base {
- 16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
- 8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
- 2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
- _ => unreachable!(),
- };
- }
- LitError::IntTooLarge => {
- self.sess.emit_err(IntLiteralTooLarge { span });
- }
+ None => None,
}
}
@@ -1953,8 +1912,8 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let minus_present = self.eat(&token::BinOp(token::Minus));
- let lit = self.parse_lit()?;
- let expr = self.mk_expr(lit.span, ExprKind::Lit(lit));
+ let (token_lit, span) = self.parse_token_lit()?;
+ let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
if minus_present {
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
@@ -1999,6 +1958,10 @@ impl<'a> Parser<'a> {
prev_span: Span,
open_delim_span: Span,
) -> PResult<'a, ()> {
+ if !self.may_recover() {
+ return Ok(());
+ }
+
if self.token.kind == token::Comma {
if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
return Ok(());
@@ -2039,7 +2002,7 @@ impl<'a> Parser<'a> {
lo: Span,
blk_mode: BlockCheckMode,
) -> PResult<'a, P<Expr>> {
- if self.is_array_like_block() {
+ if self.may_recover() && self.is_array_like_block() {
if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo) {
return Ok(arr);
}
@@ -2082,15 +2045,15 @@ impl<'a> Parser<'a> {
if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
let asyncness = if self.token.uninterpolated_span().rust_2018() {
- self.parse_asyncness()
+ self.parse_asyncness(Case::Sensitive)
} else {
Async::No
};
let capture_clause = self.parse_capture_clause()?;
- let decl = self.parse_fn_block_decl()?;
+ let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?;
let decl_hi = self.prev_token.span;
- let mut body = match decl.output {
+ let mut body = match fn_decl.output {
FnRetTy::Default(_) => {
let restrictions = self.restrictions - Restrictions::STMT_EXPR;
self.parse_expr_res(restrictions, None)?
@@ -2109,12 +2072,7 @@ impl<'a> Parser<'a> {
if self.token.kind == TokenKind::Semi
&& matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
- // HACK: This is needed so we can detect whether we're inside a macro,
- // where regular assumptions about what tokens can follow other tokens
- // don't necessarily apply.
&& self.may_recover()
- // FIXME(Nilstrieb): Remove this check once `may_recover` actually stops recovery
- && self.subparser_name.is_none()
{
// It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next
@@ -2126,15 +2084,16 @@ impl<'a> Parser<'a> {
let closure = self.mk_expr(
lo.to(body.span),
- ExprKind::Closure(
+ ExprKind::Closure(Box::new(ast::Closure {
binder,
capture_clause,
asyncness,
movability,
- decl,
+ fn_decl,
body,
- lo.to(decl_hi),
- ),
+ fn_decl_span: lo.to(decl_hi),
+ fn_arg_span,
+ })),
);
// Disable recovery for closure body
@@ -2162,7 +2121,9 @@ impl<'a> Parser<'a> {
}
/// Parses the `|arg, arg|` header of a closure.
- fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
+ fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
+ let arg_start = self.token.span.lo();
+
let inputs = if self.eat(&token::OrOr) {
Vec::new()
} else {
@@ -2178,10 +2139,11 @@ impl<'a> Parser<'a> {
self.expect_or()?;
args
};
+ let arg_span = self.prev_token.span.with_lo(arg_start);
let output =
self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
- Ok(P(FnDecl { inputs, output }))
+ Ok((P(FnDecl { inputs, output }), arg_span))
}
/// Parses a parameter in a closure header (e.g., `|arg, arg|`).
@@ -2263,13 +2225,14 @@ impl<'a> Parser<'a> {
self.mk_block_err(cond_span.shrink_to_hi())
}
} else {
- let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+ let attrs = self.parse_outer_attributes()?; // For recovery.
let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_block()?
} else {
if let Some(block) = recover_block_from_condition(self) {
block
} else {
+ self.error_on_extra_if(&cond)?;
// Parse block, which will always fail, but we can add a nice note to the error
self.parse_block().map_err(|mut err| {
err.span_note(
@@ -2280,7 +2243,7 @@ impl<'a> Parser<'a> {
})?
}
};
- self.error_on_if_block_attrs(lo, false, block.span, &attrs);
+ self.error_on_if_block_attrs(lo, false, block.span, attrs);
block
};
let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
@@ -2321,7 +2284,15 @@ impl<'a> Parser<'a> {
RecoverColon::Yes,
CommaRecoveryMode::LikelyTuple,
)?;
- self.expect(&token::Eq)?;
+ if self.token == token::EqEq {
+ self.sess.emit_err(ExpectedEqForLetExpr {
+ span: self.token.span,
+ sugg_span: self.token.span,
+ });
+ self.bump();
+ } else {
+ self.expect(&token::Eq)?;
+ }
let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
})?;
@@ -2333,7 +2304,7 @@ impl<'a> Parser<'a> {
/// Parses an `else { ... }` expression (`else` token already eaten).
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
let else_span = self.prev_token.span; // `else`
- let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+ let attrs = self.parse_outer_attributes()?; // For recovery.
let expr = if self.eat_keyword(kw::If) {
self.parse_if_expr()?
} else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
@@ -2368,7 +2339,7 @@ impl<'a> Parser<'a> {
},
}
};
- self.error_on_if_block_attrs(else_span, true, expr.span, &attrs);
+ self.error_on_if_block_attrs(else_span, true, expr.span, attrs);
Ok(expr)
}
@@ -2377,8 +2348,13 @@ impl<'a> Parser<'a> {
ctx_span: Span,
is_ctx_else: bool,
branch_span: Span,
- attrs: &[ast::Attribute],
+ attrs: AttrWrapper,
) {
+ if attrs.is_empty() {
+ return;
+ }
+
+ let attrs: &[ast::Attribute] = &attrs.take_for_recovery(self.sess);
let (attributes, last) = match attrs {
[] => return,
[x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
@@ -2393,6 +2369,16 @@ impl<'a> Parser<'a> {
});
}
+ fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
+ if let ExprKind::Binary(Spanned { span: binop_span, node: binop}, _, right) = &cond.kind &&
+ let BinOpKind::And = binop &&
+ let ExprKind::If(cond, ..) = &right.kind {
+ Err(self.sess.create_err(UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()))))
+ } else {
+ Ok(())
+ }
+ }
+
/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
fn parse_for_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
// Record whether we are about to parse `for (`.
@@ -2456,10 +2442,11 @@ impl<'a> Parser<'a> {
/// Parses `loop { ... }` (`loop` token already eaten).
fn parse_loop_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+ let loop_span = self.prev_token.span;
let (attrs, body) = self.parse_inner_attrs_and_block()?;
Ok(self.mk_expr_with_attrs(
lo.to(self.prev_token.span),
- ExprKind::Loop(body, opt_label),
+ ExprKind::Loop(body, opt_label, loop_span),
attrs,
))
}
@@ -2602,8 +2589,8 @@ impl<'a> Parser<'a> {
// Used to check the `let_chains` and `if_let_guard` features mostly by scanning
// `&&` tokens.
fn check_let_expr(expr: &Expr) -> (bool, bool) {
- match expr.kind {
- ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, ref lhs, ref rhs) => {
+ match &expr.kind {
+ ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
let lhs_rslt = check_let_expr(lhs);
let rhs_rslt = check_let_expr(rhs);
(lhs_rslt.0 || rhs_rslt.0, false)
@@ -2826,7 +2813,7 @@ impl<'a> Parser<'a> {
fn maybe_parse_struct_expr(
&mut self,
- qself: Option<&ast::QSelf>,
+ qself: &Option<P<ast::QSelf>>,
path: &ast::Path,
) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
@@ -2834,7 +2821,7 @@ impl<'a> Parser<'a> {
if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
return Some(Err(err));
}
- let expr = self.parse_struct_expr(qself.cloned(), path.clone(), true);
+ let expr = self.parse_struct_expr(qself.clone(), path.clone(), true);
if let (Ok(expr), false) = (&expr, struct_allowed) {
// This is a struct literal, but we don't can't accept them here.
self.sess.emit_err(StructLiteralNotAllowedHere {
@@ -2867,7 +2854,7 @@ impl<'a> Parser<'a> {
};
while self.token != token::CloseDelim(close_delim) {
- if self.eat(&token::DotDot) {
+ if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) {
let exp_span = self.prev_token.span;
// We permit `.. }` on the left-hand side of a destructuring assignment.
if self.check(&token::CloseDelim(close_delim)) {
@@ -2965,7 +2952,7 @@ impl<'a> Parser<'a> {
/// Precondition: already parsed the '{'.
pub(super) fn parse_struct_expr(
&mut self,
- qself: Option<ast::QSelf>,
+ qself: Option<P<ast::QSelf>>,
pth: ast::Path,
recover: bool,
) -> PResult<'a, P<Expr>> {
@@ -3014,6 +3001,18 @@ impl<'a> Parser<'a> {
self.recover_stmt();
}
+ fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
+ if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim))
+ && self.eat(&token::DotDotDot)
+ {
+ // recover from typo of `...`, suggest `..`
+ let span = self.prev_token.span;
+ self.sess.emit_err(MissingDotDot { token_span: span, sugg_span: span });
+ return true;
+ }
+ false
+ }
+
/// Parses `ident (COLON expr)?`.
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
let attrs = self.parse_outer_attributes()?;
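
[Illustration, not part of the patch: ExprKind::MethodCall and ExprKind::Closure above now carry a single Box<ast::MethodCall> / Box<ast::Closure> instead of several inline fields. Below is a standalone size comparison showing one effect of grouping and boxing a large variant payload; all types are invented stand-ins, not rustc's.]

#![allow(dead_code)]
use std::mem::size_of;

// New shape: the variant's fields are grouped into a struct and boxed.
struct MethodCall {
    seg: [usize; 4],          // stand-in for PathSegment
    receiver: Box<ExprBoxed>, // stand-in for P<Expr>
    args: Vec<Box<ExprBoxed>>,
    span: (u32, u32),
}

enum ExprBoxed {
    Lit(u64),
    MethodCall(Box<MethodCall>),
}

// Old shape: all fields stored inline in the variant.
enum ExprInline {
    Lit(u64),
    MethodCall([usize; 4], Box<ExprInline>, Vec<Box<ExprInline>>, (u32, u32)),
}

fn main() {
    // Every ExprInline is as large as its biggest variant, while ExprBoxed
    // only pays one pointer for the method-call payload.
    println!("inline enum: {} bytes", size_of::<ExprInline>());
    println!("boxed enum:  {} bytes", size_of::<ExprBoxed>());
    assert!(size_of::<ExprBoxed>() < size_of::<ExprInline>());
}
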
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index bda301c52..03f25392a 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -3,17 +3,17 @@ use crate::errors::{DocCommentDoesNotDocumentAnything, UseEmptyBlockNotSemi};
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
-
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::util::case::Case;
use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
use rustc_ast::{BindingAnnotation, Block, FnDecl, FnSig, Param, SelfKind};
use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData};
use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
-use rustc_ast::{MacArgs, MacCall, MacDelimiter};
+use rustc_ast::{MacCall, MacDelimiter};
use rustc_ast_pretty::pprust;
use rustc_errors::{struct_span_err, Applicability, IntoDiagnostic, PResult, StashKey};
use rustc_span::edition::Edition;
@@ -21,9 +21,10 @@ use rustc_span::lev_distance::lev_distance;
use rustc_span::source_map::{self, Span};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::DUMMY_SP;
-
use std::convert::TryFrom;
use std::mem;
+use thin_vec::ThinVec;
+use tracing::debug;
impl<'a> Parser<'a> {
/// Parses a source module as a crate. This is the main entry point for the parser.
@@ -34,7 +35,7 @@ impl<'a> Parser<'a> {
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
self.expect_keyword(kw::Mod)?;
let id = self.parse_ident()?;
let mod_kind = if self.eat(&token::Semi) {
@@ -143,8 +144,15 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let vis = self.parse_visibility(FollowedByType::No)?;
let mut def = self.parse_defaultness();
- let kind =
- self.parse_item_kind(&mut attrs, mac_allowed, lo, &vis, &mut def, fn_parse_mode)?;
+ let kind = self.parse_item_kind(
+ &mut attrs,
+ mac_allowed,
+ lo,
+ &vis,
+ &mut def,
+ fn_parse_mode,
+ Case::Sensitive,
+ )?;
if let Some((ident, kind)) = kind {
self.error_on_unconsumed_default(def, &kind);
let span = lo.to(self.prev_token.span);
@@ -205,16 +213,18 @@ impl<'a> Parser<'a> {
vis: &Visibility,
def: &mut Defaultness,
fn_parse_mode: FnParseMode,
+ case: Case,
) -> PResult<'a, Option<ItemInfo>> {
let def_final = def == &Defaultness::Final;
- let mut def = || mem::replace(def, Defaultness::Final);
+ let mut def_ = || mem::replace(def, Defaultness::Final);
- let info = if self.eat_keyword(kw::Use) {
+ let info = if self.eat_keyword_case(kw::Use, case) {
self.parse_use_item()?
- } else if self.check_fn_front_matter(def_final) {
+ } else if self.check_fn_front_matter(def_final, case) {
// FUNCTION ITEM
- let (ident, sig, generics, body) = self.parse_fn(attrs, fn_parse_mode, lo, vis)?;
- (ident, ItemKind::Fn(Box::new(Fn { defaultness: def(), sig, generics, body })))
+ let (ident, sig, generics, body) =
+ self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
+ (ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
} else if self.eat_keyword(kw::Extern) {
if self.eat_keyword(kw::Crate) {
// EXTERN CRATE
@@ -225,7 +235,7 @@ impl<'a> Parser<'a> {
}
} else if self.is_unsafe_foreign_mod() {
// EXTERN BLOCK
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
self.expect_keyword(kw::Extern)?;
self.parse_item_foreign_mod(attrs, unsafety)?
} else if self.is_static_global() {
@@ -234,15 +244,15 @@ impl<'a> Parser<'a> {
let m = self.parse_mutability();
let (ident, ty, expr) = self.parse_item_global(Some(m))?;
(ident, ItemKind::Static(ty, m, expr))
- } else if let Const::Yes(const_span) = self.parse_constness() {
+ } else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
// CONST ITEM
if self.token.is_keyword(kw::Impl) {
// recover from `const impl`, suggest `impl const`
- self.recover_const_impl(const_span, attrs, def())?
+ self.recover_const_impl(const_span, attrs, def_())?
} else {
self.recover_const_mut(const_span);
let (ident, ty, expr) = self.parse_item_global(None)?;
- (ident, ItemKind::Const(def(), ty, expr))
+ (ident, ItemKind::Const(def_(), ty, expr))
}
} else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
// TRAIT ITEM
@@ -251,7 +261,7 @@ impl<'a> Parser<'a> {
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Impl])
{
// IMPL ITEM
- self.parse_item_impl(attrs, def())?
+ self.parse_item_impl(attrs, def_())?
} else if self.check_keyword(kw::Mod)
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Mod])
{
@@ -259,7 +269,7 @@ impl<'a> Parser<'a> {
self.parse_item_mod(attrs)?
} else if self.eat_keyword(kw::Type) {
// TYPE ITEM
- self.parse_type_alias(def())?
+ self.parse_type_alias(def_())?
} else if self.eat_keyword(kw::Enum) {
// ENUM ITEM
self.parse_item_enum()?
@@ -286,6 +296,19 @@ impl<'a> Parser<'a> {
} else if self.isnt_macro_invocation() && vis.kind.is_pub() {
self.recover_missing_kw_before_item()?;
return Ok(None);
+ } else if self.isnt_macro_invocation() && case == Case::Sensitive {
+ _ = def_;
+
+ // Recover wrong cased keywords
+ return self.parse_item_kind(
+ attrs,
+ macros_allowed,
+ lo,
+ vis,
+ def,
+ fn_parse_mode,
+ Case::Insensitive,
+ );
} else if macros_allowed && self.check_path() {
// MACRO INVOCATION ITEM
(Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?)))
@@ -449,7 +472,7 @@ impl<'a> Parser<'a> {
fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
self.expect(&token::Not)?; // `!`
- match self.parse_mac_args() {
+ match self.parse_delim_args() {
// `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
Ok(args) => {
self.eat_semi_for_macro_if_needed(&args);
@@ -538,7 +561,7 @@ impl<'a> Parser<'a> {
attrs: &mut AttrVec,
defaultness: Defaultness,
) -> PResult<'a, ItemInfo> {
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
self.expect_keyword(kw::Impl)?;
// First, parse generic parameters if necessary.
@@ -552,7 +575,7 @@ impl<'a> Parser<'a> {
generics
};
- let constness = self.parse_constness();
+ let constness = self.parse_constness(Case::Sensitive);
if let Const::Yes(span) = constness {
self.sess.gated_spans.gate(sym::const_trait_impl, span);
}
@@ -796,7 +819,7 @@ impl<'a> Parser<'a> {
/// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
// Parse optional `auto` prefix.
let is_auto = if self.eat_keyword(kw::Auto) { IsAuto::Yes } else { IsAuto::No };
@@ -950,7 +973,8 @@ impl<'a> Parser<'a> {
fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.token.span;
- let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
+ let mut prefix =
+ ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|| self.check(&token::BinOp(token::Star))
|| self.is_import_coupler()
@@ -971,7 +995,24 @@ impl<'a> Parser<'a> {
if self.eat(&token::ModSep) {
self.parse_use_tree_glob_or_nested()?
} else {
- UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
+ // Recover from using a colon as path separator.
+ while self.eat_noexpect(&token::Colon) {
+ self.struct_span_err(self.prev_token.span, "expected `::`, found `:`")
+ .span_suggestion_short(
+ self.prev_token.span,
+ "use double colon",
+ "::",
+ Applicability::MachineApplicable,
+ )
+ .note_once("import paths are delimited using `::`")
+ .emit();
+
+ // We parse the rest of the path and append it to the original prefix.
+ self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
+ prefix.span = lo.to(self.prev_token.span);
+ }
+
+ UseTreeKind::Simple(self.parse_rename()?)
}
};
@@ -1215,8 +1256,8 @@ impl<'a> Parser<'a> {
}
};
- match impl_info.1 {
- ItemKind::Impl(box Impl { of_trait: Some(ref trai), ref mut constness, .. }) => {
+ match &mut impl_info.1 {
+ ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
*constness = Const::Yes(const_span);
let before_trait = trai.path.span.shrink_to_lo();
@@ -1373,7 +1414,10 @@ impl<'a> Parser<'a> {
Ok((Some(vr), TrailingToken::MaybeComma))
},
- )
+ ).map_err(|mut err|{
+ err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`");
+ err
+ })
}
/// Parses `struct Foo { ... }`.
@@ -1745,7 +1789,7 @@ impl<'a> Parser<'a> {
let (ident, is_raw) = self.ident_or_err()?;
if !is_raw && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
- let err = if self.check_fn_front_matter(false) {
+ let err = if self.check_fn_front_matter(false, Case::Sensitive) {
let inherited_vis = Visibility {
span: rustc_span::DUMMY_SP,
kind: VisibilityKind::Inherited,
@@ -1753,7 +1797,13 @@ impl<'a> Parser<'a> {
};
// We use `parse_fn` to get a span for the function
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
- match self.parse_fn(&mut AttrVec::new(), fn_parse_mode, lo, &inherited_vis) {
+ match self.parse_fn(
+ &mut AttrVec::new(),
+ fn_parse_mode,
+ lo,
+ &inherited_vis,
+ Case::Insensitive,
+ ) {
Ok(_) => {
let mut err = self.struct_span_err(
lo.to(self.prev_token.span),
@@ -1827,7 +1877,7 @@ impl<'a> Parser<'a> {
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
- self.parse_mac_args()? // `MacBody`
+ self.parse_delim_args()? // `MacBody`
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span();
@@ -1840,7 +1890,7 @@ impl<'a> Parser<'a> {
let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
let tokens = TokenStream::new(vec![params, arrow, body]);
let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
- P(MacArgs::Delimited(dspan, MacDelimiter::Brace, tokens))
+ P(DelimArgs { dspan, delim: MacDelimiter::Brace, tokens })
} else {
return self.unexpected();
};
@@ -1895,7 +1945,7 @@ impl<'a> Parser<'a> {
.emit();
}
- let body = self.parse_mac_args()?;
+ let body = self.parse_delim_args()?;
self.eat_semi_for_macro_if_needed(&body);
self.complain_if_pub_macro(vis, true);
@@ -1934,14 +1984,14 @@ impl<'a> Parser<'a> {
}
}
- fn eat_semi_for_macro_if_needed(&mut self, args: &MacArgs) {
+ fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
if args.need_semicolon() && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item(args);
}
}
- fn report_invalid_macro_expansion_item(&self, args: &MacArgs) {
- let span = args.span().expect("undelimited macro call");
+ fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) {
+ let span = args.dspan.entire();
let mut err = self.struct_span_err(
span,
"macros that expand to items must be delimited with braces or followed by a semicolon",
@@ -1950,10 +2000,7 @@ impl<'a> Parser<'a> {
// macros within the same crate (that we can fix), which is sad.
if !span.from_expansion() {
if self.unclosed_delims.is_empty() {
- let DelimSpan { open, close } = match args {
- MacArgs::Empty | MacArgs::Eq(..) => unreachable!(),
- MacArgs::Delimited(dspan, ..) => *dspan,
- };
+ let DelimSpan { open, close } = args.dspan;
err.multipart_suggestion(
"change the delimiters to curly braces",
vec![(open, "{".to_string()), (close, '}'.to_string())],
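
A sketch of the situation this diagnostic covers: an item-position macro call that is neither brace-delimited nor followed by a semicolon (macro and item names are illustrative):

    macro_rules! make_fn {
        () => { fn generated() {} };
    }

    // Rejected at item position (parentheses, no trailing semicolon):
    //     make_fn!()
    // error: macros that expand to items must be delimited with braces or followed by a semicolon
    // Accepted forms:
    make_fn! {}
    //     make_fn!();
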
@@ -2077,8 +2124,9 @@ impl<'a> Parser<'a> {
fn_parse_mode: FnParseMode,
sig_lo: Span,
vis: &Visibility,
+ case: Case,
) -> PResult<'a, (Ident, FnSig, Generics, Option<P<Block>>)> {
- let header = self.parse_fn_front_matter(vis)?; // `const ... fn`
+ let header = self.parse_fn_front_matter(vis, case)?; // `const ... fn`
let ident = self.parse_ident()?; // `foo`
let mut generics = self.parse_generics()?; // `<'a, T, ...>`
let decl =
@@ -2155,7 +2203,7 @@ impl<'a> Parser<'a> {
///
/// `check_pub` adds additional `pub` to the checks in case users place it
/// wrongly, can be used to ensure `pub` never comes after `default`.
- pub(super) fn check_fn_front_matter(&mut self, check_pub: bool) -> bool {
+ pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
// We use an over-approximation here.
// `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
// `pub` is added in case users got confused with the ordering like `async pub fn`,
@@ -2165,23 +2213,30 @@ impl<'a> Parser<'a> {
} else {
&[kw::Const, kw::Async, kw::Unsafe, kw::Extern]
};
- self.check_keyword(kw::Fn) // Definitely an `fn`.
+ self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
// `$qual fn` or `$qual $qual`:
- || quals.iter().any(|&kw| self.check_keyword(kw))
+ || quals.iter().any(|&kw| self.check_keyword_case(kw, case))
&& self.look_ahead(1, |t| {
// `$qual fn`, e.g. `const fn` or `async fn`.
- t.is_keyword(kw::Fn)
+ t.is_keyword_case(kw::Fn, case)
// Two qualifiers `$qual $qual` is enough, e.g. `async unsafe`.
- || t.is_non_raw_ident_where(|i| quals.contains(&i.name)
- // Rule out 2015 `const async: T = val`.
- && i.is_reserved()
+ || (
+ (
+ t.is_non_raw_ident_where(|i|
+ quals.contains(&i.name)
+ // Rule out 2015 `const async: T = val`.
+ && i.is_reserved()
+ )
+ || case == Case::Insensitive
+ && t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
+ )
// Rule out unsafe extern block.
&& !self.is_unsafe_foreign_mod())
})
// `extern ABI fn`
- || self.check_keyword(kw::Extern)
+ || self.check_keyword_case(kw::Extern, case)
&& self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
- && self.look_ahead(2, |t| t.is_keyword(kw::Fn))
+ && self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case))
}
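
The qualifier shapes this over-approximation is meant to accept, written out as ordinary items; all of these are valid Rust (edition 2018+ for `async`):

    const fn a() {}                  // `$qual fn`
    async fn b() {}                  // `$qual fn`
    const unsafe fn c() {}           // two qualifiers `$qual $qual`
    extern "C" fn d() {}             // `extern ABI fn`
    pub unsafe extern "C" fn e() {}  // `pub` is also considered when `check_pub` is set
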
/// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
@@ -2195,24 +2250,28 @@ impl<'a> Parser<'a> {
///
/// `vis` represents the visibility that was already parsed, if any. Use
/// `Visibility::Inherited` when no visibility is known.
- pub(super) fn parse_fn_front_matter(&mut self, orig_vis: &Visibility) -> PResult<'a, FnHeader> {
+ pub(super) fn parse_fn_front_matter(
+ &mut self,
+ orig_vis: &Visibility,
+ case: Case,
+ ) -> PResult<'a, FnHeader> {
let sp_start = self.token.span;
- let constness = self.parse_constness();
+ let constness = self.parse_constness(case);
let async_start_sp = self.token.span;
- let asyncness = self.parse_asyncness();
+ let asyncness = self.parse_asyncness(case);
let unsafe_start_sp = self.token.span;
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(case);
let ext_start_sp = self.token.span;
- let ext = self.parse_extern();
+ let ext = self.parse_extern(case);
if let Async::Yes { span, .. } = asyncness {
self.ban_async_in_2015(span);
}
- if !self.eat_keyword(kw::Fn) {
+ if !self.eat_keyword_case(kw::Fn, case) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
@@ -2541,8 +2600,8 @@ impl<'a> Parser<'a> {
}
fn is_named_param(&self) -> bool {
- let offset = match self.token.kind {
- token::Interpolated(ref nt) => match **nt {
+ let offset = match &self.token.kind {
+ token::Interpolated(nt) => match **nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
},
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 5fe29062b..bebb01266 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -22,10 +22,11 @@ use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::util::case::Case;
use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, Extern};
-use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacArgsEq, MacDelimiter, Mutability, StrLit};
+use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, DelimArgs, Extern};
+use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
@@ -104,6 +105,7 @@ macro_rules! maybe_whole {
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery
+ && $self.may_recover()
&& $self.look_ahead(1, |t| t == &token::ModSep)
&& let token::Interpolated(nt) = &$self.token.kind
&& let token::NtTy(ty) = &**nt
@@ -382,8 +384,8 @@ enum TokenType {
impl TokenType {
fn to_string(&self) -> String {
- match *self {
- TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
+ match self {
+ TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
@@ -501,8 +503,8 @@ impl<'a> Parser<'a> {
parser
}
- pub fn forbid_recovery(mut self) -> Self {
- self.recovery = Recovery::Forbidden;
+ pub fn recovery(mut self, recovery: Recovery) -> Self {
+ self.recovery = recovery;
self
}
@@ -635,6 +637,20 @@ impl<'a> Parser<'a> {
self.token.is_keyword(kw)
}
+ fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
+ if self.check_keyword(kw) {
+ return true;
+ }
+
+ if case == Case::Insensitive
+ && let Some((ident, /* is_raw */ false)) = self.token.ident()
+ && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
+ true
+ } else {
+ false
+ }
+ }
+
/// If the next token is the given keyword, eats it and returns `true`.
/// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
// Public for rustfmt usage.
@@ -647,6 +663,33 @@ impl<'a> Parser<'a> {
}
}
+ /// Eats a keyword, optionally ignoring the case.
+ /// If the case differs (and is ignored) an error is issued.
+ /// This is useful for recovery.
+ fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
+ if self.eat_keyword(kw) {
+ return true;
+ }
+
+ if case == Case::Insensitive
+ && let Some((ident, /* is_raw */ false)) = self.token.ident()
+ && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
+ self
+ .struct_span_err(ident.span, format!("keyword `{kw}` is written in the wrong case"))
+ .span_suggestion(
+ ident.span,
+ "write it in the correct case",
+ kw,
+ Applicability::MachineApplicable
+ ).emit();
+
+ self.bump();
+ return true;
+ }
+
+ false
+ }
+
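
A standalone sketch, not the rustc-internal API, of the comparison these two helpers perform on plain strings; under `Case::Insensitive` a mis-cased keyword is still accepted, and `eat_keyword_case` additionally emits the machine-applicable "write it in the correct case" suggestion shown above:

    /// `true` if `ident` matches the keyword `kw`, optionally ignoring case.
    fn keyword_matches(ident: &str, kw: &str, insensitive: bool) -> bool {
        ident == kw || (insensitive && ident.to_lowercase() == kw.to_lowercase())
    }

    fn main() {
        assert!(keyword_matches("fn", "fn", false));
        assert!(keyword_matches("FN", "fn", true));   // recovered under Case::Insensitive
        assert!(!keyword_matches("FN", "fn", false)); // rejected under Case::Sensitive
    }
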
fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
@@ -695,8 +738,8 @@ impl<'a> Parser<'a> {
fn check_inline_const(&self, dist: usize) -> bool {
self.is_keyword_ahead(dist, &[kw::Const])
- && self.look_ahead(dist + 1, |t| match t.kind {
- token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
+ && self.look_ahead(dist + 1, |t| match &t.kind {
+ token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
token::OpenDelim(Delimiter::Brace) => true,
_ => false,
})
@@ -817,7 +860,7 @@ impl<'a> Parser<'a> {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
break;
}
- if let Some(ref t) = sep.sep {
+ if let Some(t) = &sep.sep {
if first {
first = false;
} else {
@@ -852,7 +895,7 @@ impl<'a> Parser<'a> {
_ => {
// Attempt to keep parsing if it was a similar separator.
- if let Some(ref tokens) = t.similar_tokens() {
+ if let Some(tokens) = t.similar_tokens() {
if tokens.contains(&self.token.kind) && !unclosed_delims {
self.bump();
}
@@ -900,6 +943,10 @@ impl<'a> Parser<'a> {
Err(e) => {
// Parsing failed, therefore it must be something more serious
// than just a missing separator.
+ for xx in &e.children {
+ // Propagate the help messages from the sub-error `e` to the main error `expect_err`.
+ expect_err.children.push(xx.clone());
+ }
expect_err.emit();
e.cancel();
@@ -1126,8 +1173,8 @@ impl<'a> Parser<'a> {
}
/// Parses asyncness: `async` or nothing.
- fn parse_asyncness(&mut self) -> Async {
- if self.eat_keyword(kw::Async) {
+ fn parse_asyncness(&mut self, case: Case) -> Async {
+ if self.eat_keyword_case(kw::Async, case) {
let span = self.prev_token.uninterpolated_span();
Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
} else {
@@ -1136,8 +1183,8 @@ impl<'a> Parser<'a> {
}
/// Parses unsafety: `unsafe` or nothing.
- fn parse_unsafety(&mut self) -> Unsafe {
- if self.eat_keyword(kw::Unsafe) {
+ fn parse_unsafety(&mut self, case: Case) -> Unsafe {
+ if self.eat_keyword_case(kw::Unsafe, case) {
Unsafe::Yes(self.prev_token.uninterpolated_span())
} else {
Unsafe::No
@@ -1145,10 +1192,10 @@ impl<'a> Parser<'a> {
}
/// Parses constness: `const` or nothing.
- fn parse_constness(&mut self) -> Const {
+ fn parse_constness(&mut self, case: Case) -> Const {
// Avoid const blocks to be parsed as const items
if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
- && self.eat_keyword(kw::Const)
+ && self.eat_keyword_case(kw::Const, case)
{
Const::Yes(self.prev_token.uninterpolated_span())
} else {
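
The one-token look-ahead above keeps `const { ... }` block expressions from being eaten as constness. A brief sketch of the two shapes being distinguished (the inline `const` block is feature-gated and shown only as a comment):

    const fn f() -> u32 { 1 }    // `const` not followed by `{`: parsed as constness of the fn
    // let x = const { f() };    // `const {` would start an inline const block instead
    //                           // (unstable `inline_const` feature)
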
@@ -1206,39 +1253,40 @@ impl<'a> Parser<'a> {
}
}
- fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
- self.parse_mac_args_common(true).map(P)
+ fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
+ if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
}
- fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
- self.parse_mac_args_common(false)
+ fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
+ Ok(if let Some(args) = self.parse_delim_args_inner() {
+ AttrArgs::Delimited(args)
+ } else {
+ if self.eat(&token::Eq) {
+ let eq_span = self.prev_token.span;
+ AttrArgs::Eq(eq_span, AttrArgsEq::Ast(self.parse_expr_force_collect()?))
+ } else {
+ AttrArgs::Empty
+ }
+ })
}
- fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
- Ok(
- if self.check(&token::OpenDelim(Delimiter::Parenthesis))
- || self.check(&token::OpenDelim(Delimiter::Bracket))
- || self.check(&token::OpenDelim(Delimiter::Brace))
- {
- match self.parse_token_tree() {
- TokenTree::Delimited(dspan, delim, tokens) =>
- // We've confirmed above that there is a delimiter so unwrapping is OK.
- {
- MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
- }
- _ => unreachable!(),
- }
- } else if !delimited_only {
- if self.eat(&token::Eq) {
- let eq_span = self.prev_token.span;
- MacArgs::Eq(eq_span, MacArgsEq::Ast(self.parse_expr_force_collect()?))
- } else {
- MacArgs::Empty
- }
- } else {
- return self.unexpected();
- },
- )
+ fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
+ if self.check(&token::OpenDelim(Delimiter::Parenthesis))
+ || self.check(&token::OpenDelim(Delimiter::Bracket))
+ || self.check(&token::OpenDelim(Delimiter::Brace))
+ {
+ match self.parse_token_tree() {
+ // We've confirmed above that there is a delimiter so unwrapping is OK.
+ TokenTree::Delimited(dspan, delim, tokens) => Some(DelimArgs {
+ dspan,
+ delim: MacDelimiter::from_token(delim).unwrap(),
+ tokens,
+ }),
+ _ => unreachable!(),
+ }
+ } else {
+ None
+ }
}
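
For orientation, the three attribute-argument shapes `parse_attr_args` now maps onto `AttrArgs` (variant names taken from the imports above; the attributes themselves are ordinary examples):

    #[inline]            // no arguments        -> AttrArgs::Empty
    #[cfg(test)]         // delimited arguments -> AttrArgs::Delimited(..)
    #[doc = "example"]   // `= expr` arguments  -> AttrArgs::Eq(.., AttrArgsEq::Ast(..))
    fn annotated() {}
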
fn parse_or_use_outer_attributes(
@@ -1403,8 +1451,8 @@ impl<'a> Parser<'a> {
}
/// Parses `extern string_literal?`.
- fn parse_extern(&mut self) -> Extern {
- if self.eat_keyword(kw::Extern) {
+ fn parse_extern(&mut self, case: Case) -> Extern {
+ if self.eat_keyword_case(kw::Extern, case) {
let mut extern_span = self.prev_token.span;
let abi = self.parse_abi();
if let Some(abi) = abi {
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 103dd8012..239ed79ce 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -42,9 +42,9 @@ impl<'a> Parser<'a> {
token::Comma | token::Ident(..) | token::Interpolated(..) => true,
_ => token.can_begin_type(),
},
- NonterminalKind::Block => match token.kind {
+ NonterminalKind::Block => match &token.kind {
token::OpenDelim(Delimiter::Brace) => true,
- token::Interpolated(ref nt) => !matches!(
+ token::Interpolated(nt) => !matches!(
**nt,
token::NtItem(_)
| token::NtPat(_)
@@ -56,16 +56,16 @@ impl<'a> Parser<'a> {
),
_ => false,
},
- NonterminalKind::Path | NonterminalKind::Meta => match token.kind {
+ NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
token::ModSep | token::Ident(..) => true,
- token::Interpolated(ref nt) => match **nt {
+ token::Interpolated(nt) => match **nt {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt),
},
_ => false,
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
- match token.kind {
+ match &token.kind {
token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
token::OpenDelim(Delimiter::Bracket) | // slice pattern
@@ -80,13 +80,13 @@ impl<'a> Parser<'a> {
token::BinOp(token::Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern
token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr {..}),
- token::Interpolated(ref nt) => may_be_ident(nt),
+ token::Interpolated(nt) => may_be_ident(nt),
_ => false,
}
}
- NonterminalKind::Lifetime => match token.kind {
+ NonterminalKind::Lifetime => match &token.kind {
token::Lifetime(_) => true,
- token::Interpolated(ref nt) => {
+ token::Interpolated(nt) => {
matches!(**nt, token::NtLifetime(_))
}
_ => false,
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 52c11b4e3..cbeec951e 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
err.span_label(self_.token.span, format!("expected {}", expected));
err
});
- PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
+ PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
} else {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {
@@ -485,7 +485,7 @@ impl<'a> Parser<'a> {
let mut rhs = self.parse_pat_no_top_alt(None)?;
let sp = lhs.span.to(rhs.span);
- if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind {
+ if let PatKind::Ident(_, _, sub @ None) = &mut rhs.kind {
// The user inverted the order, so help them fix that.
let mut applicability = Applicability::MachineApplicable;
// FIXME(bindings_after_at): Remove this code when stabilizing the feature.
@@ -595,7 +595,7 @@ impl<'a> Parser<'a> {
self.recover_additional_muts();
// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
- if let token::Interpolated(ref nt) = self.token.kind {
+ if let token::Interpolated(nt) = &self.token.kind {
if let token::NtPat(_) = **nt {
self.expected_ident_found().emit();
}
@@ -693,7 +693,7 @@ impl<'a> Parser<'a> {
/// Parse macro invocation
fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
self.bump();
- let args = self.parse_mac_args()?;
+ let args = self.parse_delim_args()?;
let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
Ok(PatKind::MacCall(mac))
}
@@ -796,7 +796,7 @@ impl<'a> Parser<'a> {
/// expression syntax `...expr` for splatting in expressions.
fn parse_pat_range_to(&mut self, mut re: Spanned<RangeEnd>) -> PResult<'a, PatKind> {
let end = self.parse_pat_range_end()?;
- if let RangeEnd::Included(ref mut syn @ RangeSyntax::DotDotDot) = &mut re.node {
+ if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
*syn = RangeSyntax::DotDotEq;
self.struct_span_err(re.span, "range-to patterns with `...` are not allowed")
.span_suggestion_short(
@@ -889,7 +889,7 @@ impl<'a> Parser<'a> {
}
/// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
- fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+ fn parse_pat_struct(&mut self, qself: Option<P<QSelf>>, path: Path) -> PResult<'a, PatKind> {
if qself.is_some() {
// Feature gate the use of qualified paths in patterns
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
@@ -906,7 +906,11 @@ impl<'a> Parser<'a> {
}
/// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
- fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+ fn parse_pat_tuple_struct(
+ &mut self,
+ qself: Option<P<QSelf>>,
+ path: Path,
+ ) -> PResult<'a, PatKind> {
let (fields, _) = self.parse_paren_comma_seq(|p| {
p.parse_pat_allow_top_alt(
None,
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index fdc1af27f..2d432e3f5 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -11,8 +11,9 @@ use rustc_ast::{
use rustc_errors::{pluralize, Applicability, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
-
use std::mem;
+use thin_vec::ThinVec;
+use tracing::debug;
/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
@@ -48,7 +49,7 @@ impl<'a> Parser<'a> {
/// `<T as U>::a`
/// `<T as U>::F::a<S>` (without disambiguator)
/// `<T as U>::F::a::<S>` (with disambiguator)
- pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> {
+ pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (P<QSelf>, Path)> {
let lo = self.prev_token.span;
let ty = self.parse_ty()?;
@@ -63,7 +64,7 @@ impl<'a> Parser<'a> {
path_span = path_lo.to(self.prev_token.span);
} else {
path_span = self.token.span.to(self.token.span);
- path = ast::Path { segments: Vec::new(), span: path_span, tokens: None };
+ path = ast::Path { segments: ThinVec::new(), span: path_span, tokens: None };
}
// See doc comment for `unmatched_angle_bracket_count`.
@@ -77,7 +78,7 @@ impl<'a> Parser<'a> {
self.expect(&token::ModSep)?;
}
- let qself = QSelf { ty, path_span, position: path.segments.len() };
+ let qself = P(QSelf { ty, path_span, position: path.segments.len() });
self.parse_path_segments(&mut path.segments, style, None)?;
Ok((
@@ -179,7 +180,7 @@ impl<'a> Parser<'a> {
}
let lo = self.token.span;
- let mut segments = Vec::new();
+ let mut segments = ThinVec::new();
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
@@ -191,7 +192,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_path_segments(
&mut self,
- segments: &mut Vec<PathSegment>,
+ segments: &mut ThinVec<PathSegment>,
style: PathStyle,
ty_generics: Option<&Generics>,
) -> PResult<'a, ()> {
@@ -631,7 +632,9 @@ impl<'a> Parser<'a> {
/// - A single-segment path.
pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
match &expr.kind {
- ast::ExprKind::Block(_, _) | ast::ExprKind::Lit(_) => true,
+ ast::ExprKind::Block(_, _)
+ | ast::ExprKind::Lit(_)
+ | ast::ExprKind::IncludedBytes(..) => true,
ast::ExprKind::Unary(ast::UnOp::Neg, expr) => {
matches!(expr.kind, ast::ExprKind::Lit(_))
}
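
The match above enumerates the expression forms accepted as an unbraced const generic argument: blocks, literals, `include_bytes!` expansions (`ExprKind::IncludedBytes`, newly added here), negated literals, and single-segment paths. A small sketch of the common cases (`shift`, `N`, and `OFFSET` are illustrative names):

    fn shift<const N: i32>() -> i32 { N }

    const OFFSET: i32 = 2;

    fn demo() -> (i32, i32, i32) {
        (
            shift::<3>(),          // ExprKind::Lit
            shift::<{ 1 + 1 }>(),  // ExprKind::Block
            shift::<OFFSET>(),     // single-segment path
        )
    }
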
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 12753c678..42197e637 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -10,8 +10,8 @@ use super::{
use crate::errors::{
AssignmentElseNotAllowed, CompoundAssignmentExpressionInLet, ConstLetMutuallyExclusive,
DocCommentDoesNotDocumentAnything, ExpectedStatementAfterOuterAttr, InvalidCurlyInLetElse,
- InvalidExpressionInLetElse, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
- WrapExpressionInParentheses,
+ InvalidExpressionInLetElse, InvalidIdentiferStartsWithNumber, InvalidVariableDeclaration,
+ InvalidVariableDeclarationSub, WrapExpressionInParentheses,
};
use crate::maybe_whole;
@@ -19,7 +19,7 @@ use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::util::classify;
-use rustc_ast::{AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
+use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
use rustc_ast::{StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
@@ -72,14 +72,22 @@ impl<'a> Parser<'a> {
Ok(Some(if self.token.is_keyword(kw::Let) {
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
- } else if self.is_kw_followed_by_ident(kw::Mut) {
- self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::MissingLet)?
- } else if self.is_kw_followed_by_ident(kw::Auto) {
+ } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
+ self.recover_stmt_local_after_let(lo, attrs, InvalidVariableDeclarationSub::MissingLet)?
+ } else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
self.bump(); // `auto`
- self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::UseLetNotAuto)?
- } else if self.is_kw_followed_by_ident(sym::var) {
+ self.recover_stmt_local_after_let(
+ lo,
+ attrs,
+ InvalidVariableDeclarationSub::UseLetNotAuto,
+ )?
+ } else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
self.bump(); // `var`
- self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::UseLetNotVar)?
+ self.recover_stmt_local_after_let(
+ lo,
+ attrs,
+ InvalidVariableDeclarationSub::UseLetNotVar,
+ )?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
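
A sketch of the statements the `let`-recovery arms above now catch when recovery is enabled (`may_recover`); the exact subdiagnostic wording comes from `InvalidVariableDeclarationSub` in the errors module:

    mut x = 1;    // missing `let`          -> suggested `let mut x = 1;`
    auto y = 2;   // C++/Java-style `auto`  -> suggested `let y = 2;`
    var z = 3;    // JavaScript-style `var` -> suggested `let z = 3;`
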
@@ -101,7 +109,7 @@ impl<'a> Parser<'a> {
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) {
// Do not attempt to parse an expression if we're done here.
- self.error_outer_attrs(&attrs.take_for_recovery());
+ self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
@@ -120,7 +128,7 @@ impl<'a> Parser<'a> {
}
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
- self.error_outer_attrs(&attrs.take_for_recovery());
+ self.error_outer_attrs(attrs);
return Ok(None);
}))
}
@@ -167,14 +175,13 @@ impl<'a> Parser<'a> {
/// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
/// At this point, the `!` token after the path has already been eaten.
fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
- let args = self.parse_mac_args()?;
- let delim = args.delim();
+ let args = self.parse_delim_args()?;
+ let delim = args.delim.to_token();
let hi = self.prev_token.span;
let style = match delim {
- Some(Delimiter::Brace) => MacStmtStyle::Braces,
- Some(_) => MacStmtStyle::NoBraces,
- None => unreachable!(),
+ Delimiter::Brace => MacStmtStyle::Braces,
+ _ => MacStmtStyle::NoBraces,
};
let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
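
The delimiter of a statement macro call determines its `MacStmtStyle`, which in turn decides whether a trailing semicolon is required. A rough sketch with an illustrative macro `m`:

    macro_rules! m { ($($tt:tt)*) => {} }

    fn demo() {
        m! { 1 + 1 }  // Delimiter::Brace        -> MacStmtStyle::Braces, no `;` needed
        m!(1 + 1);    // parentheses or brackets -> MacStmtStyle::NoBraces, `;` required here
    }
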
@@ -199,8 +206,10 @@ impl<'a> Parser<'a> {
/// Error on outer attributes in this context.
/// Also error if the previous token was a doc comment.
- fn error_outer_attrs(&self, attrs: &[Attribute]) {
- if let [.., last] = attrs {
+ fn error_outer_attrs(&self, attrs: AttrWrapper) {
+ if !attrs.is_empty()
+ && let attrs = attrs.take_for_recovery(self.sess)
+ && let attrs @ [.., last] = &*attrs {
if last.is_doc_comment() {
self.sess.emit_err(DocCommentDoesNotDocumentAnything {
span: last.span,
@@ -212,13 +221,21 @@ impl<'a> Parser<'a> {
}
}
- fn recover_stmt_local(
+ fn recover_stmt_local_after_let(
&mut self,
lo: Span,
attrs: AttrWrapper,
subdiagnostic: fn(Span) -> InvalidVariableDeclarationSub,
) -> PResult<'a, Stmt> {
- let stmt = self.recover_local_after_let(lo, attrs)?;
+ let stmt =
+ self.collect_tokens_trailing_token(attrs, ForceCollect::Yes, |this, attrs| {
+ let local = this.parse_local(attrs)?;
+ // FIXME - maybe capture semicolon in recovery?
+ Ok((
+ this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
+ TrailingToken::None,
+ ))
+ })?;
self.sess.emit_err(InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
Ok(stmt)
}
@@ -242,17 +259,6 @@ impl<'a> Parser<'a> {
})
}
- fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
- self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
- let local = this.parse_local(attrs)?;
- // FIXME - maybe capture semicolon in recovery?
- Ok((
- this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
- TrailingToken::None,
- ))
- })
- }
-
/// Parses a local variable declaration.
fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
let lo = self.prev_token.span;
@@ -262,6 +268,7 @@ impl<'a> Parser<'a> {
self.bump();
}
+ self.report_invalid_identifier_error()?;
let (pat, colon) = self.parse_pat_before_ty(None, RecoverComma::Yes, "`let` bindings")?;
let (err, ty) = if colon {
@@ -353,6 +360,17 @@ impl<'a> Parser<'a> {
Ok(P(ast::Local { ty, pat, kind, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None }))
}
+ /// Report an error for an invalid identifier that starts with a number, e.g. `let 1x = 123`.
+ pub fn report_invalid_identifier_error(&mut self) -> PResult<'a, ()> {
+ if let token::Literal(lit) = self.token.uninterpolate().kind &&
+ rustc_ast::MetaItemLit::from_token(&self.token).is_none() &&
+ (lit.kind == token::LitKind::Integer || lit.kind == token::LitKind::Float) &&
+ self.look_ahead(1, |t| matches!(t.kind, token::Eq) || matches!(t.kind, token::Colon ) ) {
+ return Err(self.sess.create_err(InvalidIdentiferStartsWithNumber { span: self.token.span }));
+ }
+ Ok(())
+ }
+
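
The check above rejects `let` bindings whose pattern starts with a numeric literal immediately followed by `=` or `:`; the message itself comes from `InvalidIdentiferStartsWithNumber`. Inputs it targets:

    let 1x = 123;     // rejected: `1x` is an integer literal with suffix `x`, followed by `=`
    let 2u: u32 = 0;  // rejected: also caught via the `:` look-ahead
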
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
if op.node.lazy() {
@@ -550,9 +568,9 @@ impl<'a> Parser<'a> {
};
let mut eat_semi = true;
- match stmt.kind {
+ match &mut stmt.kind {
// Expression without semicolon.
- StmtKind::Expr(ref mut expr)
+ StmtKind::Expr(expr)
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
@@ -598,7 +616,7 @@ impl<'a> Parser<'a> {
}
}
StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
- StmtKind::Local(ref mut local) if let Err(e) = self.expect_semi() => {
+ StmtKind::Local(local) if let Err(e) = self.expect_semi() => {
// We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
match &mut local.kind {
LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 2a8512acf..b7206b576 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,9 +1,11 @@
use super::{Parser, PathStyle, TokenType};
+use crate::errors::{FnPtrWithGenerics, FnPtrWithGenericsSugg};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::util::case::Case;
use rustc_ast::{
self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
@@ -267,16 +269,21 @@ impl<'a> Parser<'a> {
} else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
- } else if self.check_fn_front_matter(false) {
+ } else if self.check_fn_front_matter(false, Case::Sensitive) {
// Function pointer type
- self.parse_ty_bare_fn(lo, Vec::new(), recover_return_sign)?
+ self.parse_ty_bare_fn(lo, Vec::new(), None, recover_return_sign)?
} else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
- if self.check_fn_front_matter(false) {
- self.parse_ty_bare_fn(lo, lifetime_defs, recover_return_sign)?
+ if self.check_fn_front_matter(false, Case::Sensitive) {
+ self.parse_ty_bare_fn(
+ lo,
+ lifetime_defs,
+ Some(self.prev_token.span.shrink_to_lo()),
+ recover_return_sign,
+ )?
} else {
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
@@ -401,7 +408,7 @@ impl<'a> Parser<'a> {
.span_suggestions(
span.shrink_to_hi(),
"add `mut` or `const` here",
- ["mut ".to_string(), "const ".to_string()].into_iter(),
+ ["mut ".to_string(), "const ".to_string()],
Applicability::HasPlaceholders,
)
.emit();
@@ -518,7 +525,8 @@ impl<'a> Parser<'a> {
fn parse_ty_bare_fn(
&mut self,
lo: Span,
- params: Vec<GenericParam>,
+ mut params: Vec<GenericParam>,
+ param_insertion_point: Option<Span>,
recover_return_sign: RecoverReturnSign,
) -> PResult<'a, TyKind> {
let inherited_vis = rustc_ast::Visibility {
@@ -528,7 +536,10 @@ impl<'a> Parser<'a> {
};
let span_start = self.token.span;
let ast::FnHeader { ext, unsafety, constness, asyncness } =
- self.parse_fn_front_matter(&inherited_vis)?;
+ self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
+ if self.may_recover() && self.token.kind == TokenKind::Lt {
+ self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
+ }
let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
let whole_span = lo.to(self.prev_token.span);
if let ast::Const::Yes(span) = constness {
@@ -544,6 +555,48 @@ impl<'a> Parser<'a> {
Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl, decl_span })))
}
+ /// Recover from function pointer types with a generic parameter list (e.g. `fn<'a>(&'a str)`).
+ fn recover_fn_ptr_with_generics(
+ &mut self,
+ lo: Span,
+ params: &mut Vec<GenericParam>,
+ param_insertion_point: Option<Span>,
+ ) -> PResult<'a, ()> {
+ let generics = self.parse_generics()?;
+ let arity = generics.params.len();
+
+ let mut lifetimes: Vec<_> = generics
+ .params
+ .into_iter()
+ .filter(|param| matches!(param.kind, ast::GenericParamKind::Lifetime))
+ .collect();
+
+ let sugg = if !lifetimes.is_empty() {
+ let snippet =
+ lifetimes.iter().map(|param| param.ident.as_str()).intersperse(", ").collect();
+
+ let (left, snippet) = if let Some(span) = param_insertion_point {
+ (span, if params.is_empty() { snippet } else { format!(", {snippet}") })
+ } else {
+ (lo.shrink_to_lo(), format!("for<{snippet}> "))
+ };
+
+ Some(FnPtrWithGenericsSugg {
+ left,
+ snippet,
+ right: generics.span,
+ arity,
+ for_param_list_exists: param_insertion_point.is_some(),
+ })
+ } else {
+ None
+ };
+
+ self.sess.emit_err(FnPtrWithGenerics { span: generics.span, sugg });
+ params.append(&mut lifetimes);
+ Ok(())
+ }
+
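
A sketch of the recovery above: a bare function pointer type written with its own generic parameter list is rejected, and the collected lifetimes are suggested as a `for<...>` binder instead (or merged into an existing one). `Handler` is an illustrative name:

    // Rejected input:
    //     type Handler = fn<'a>(&'a str) -> &'a str;
    // Suggested form built from the collected lifetimes:
    type Handler = for<'a> fn(&'a str) -> &'a str;
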
/// Emit an error for the given bad function pointer qualifier.
fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
@@ -612,7 +665,7 @@ impl<'a> Parser<'a> {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall {
path,
- args: self.parse_mac_args()?,
+ args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
})))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {