use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
    SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
};
use crate::errors::{
    ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
    BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
    ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
    DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedExpressionFoundLet,
    FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
    HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
    IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
    InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
    InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
    InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
    InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
    InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
    LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
    MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
    MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray, NoFieldsForFnCall,
    NotAsNegationOperator, NotAsNegationOperatorSub, OctalFloatLiteralNotSupported,
    OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
    RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
    StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
    UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
};
use crate::maybe_recover_from_interpolated_ty_qpath;
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
use rustc_ast::util::classify;
use rustc_ast::util::literal::LitError;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
use rustc_ast::visit::Visitor;
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
use rustc_ast::{ClosureBinder, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{
    Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
    StashKey,
};
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::source_map::{self, Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Pos};

/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            match &**nt {
                token::NtExpr(e) | token::NtLiteral(e) => {
                    let e = e.clone();
                    $p.bump();
                    return Ok(e);
                }
                token::NtPath(path) => {
                    let path = (**path).clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Path(None, path)));
                }
                token::NtBlock(block) => {
                    let block = block.clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Block(block, None)));
                }
                _ => {}
            };
        }
    };
}

#[derive(Debug)]
pub(super) enum LhsExpr {
    NotYetParsed,
    AttributesParsed(AttrWrapper),
    AlreadyParsed(P<Expr>),
}

impl From<Option<AttrWrapper>> for LhsExpr {
    /// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
    /// and `None` into `LhsExpr::NotYetParsed`.
    ///
    /// This conversion does not allocate.
    fn from(o: Option<AttrWrapper>) -> Self {
        if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
    }
}

impl From<P<Expr>> for LhsExpr {
    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed(expr)`.
    ///
    /// This conversion does not allocate.
    fn from(expr: P<Expr>) -> Self {
        LhsExpr::AlreadyParsed(expr)
    }
}

impl<'a> Parser<'a> {
    /// Parses an expression.
    #[inline]
    pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
        self.current_closure.take();

        self.parse_expr_res(Restrictions::empty(), None)
    }

    /// Parses an expression, forcing tokens to be collected.
    pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
        self.collect_tokens_no_attrs(|this| this.parse_expr())
    }

    pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
        self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
    }

    fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
        match self.parse_expr() {
            Ok(expr) => Ok(expr),
            Err(mut err) => match self.token.ident() {
                Some((Ident { name: kw::Underscore, .. }, false))
                    if self.look_ahead(1, |t| t == &token::Comma) =>
                {
                    // Special-case handling of `foo(_, _, _)`
                    err.emit();
                    self.bump();
                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err))
                }
                _ => Err(err),
            },
        }
    }

    /// Parses a sequence of expressions delimited by parentheses.
    fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
        self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r)
    }

    /// Parses an expression, subject to the given restrictions.
    #[inline]
    pub(super) fn parse_expr_res(
        &mut self,
        r: Restrictions,
        already_parsed_attrs: Option<AttrWrapper>,
    ) -> PResult<'a, P<Expr>> {
        self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
    }

    /// Parses an associative expression.
    ///
    /// This parses an expression accounting for associativity and precedence of the operators in
    /// the expression.
    #[inline]
    fn parse_assoc_expr(
        &mut self,
        already_parsed_attrs: Option<AttrWrapper>,
    ) -> PResult<'a, P<Expr>> {
        self.parse_assoc_expr_with(0, already_parsed_attrs.into())
    }

    /// Parses an associative expression with operators of at least `min_prec` precedence.
    pub(super) fn parse_assoc_expr_with(
        &mut self,
        min_prec: usize,
        lhs: LhsExpr,
    ) -> PResult<'a, P<Expr>> {
        let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
            expr
        } else {
            let attrs = match lhs {
                LhsExpr::AttributesParsed(attrs) => Some(attrs),
                _ => None,
            };
            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
                return self.parse_prefix_range_expr(attrs);
            } else {
                self.parse_prefix_expr(attrs)?
            }
        };
        let last_type_ascription_set = self.last_type_ascription.is_some();

        if !self.should_continue_as_assoc_expr(&lhs) {
            self.last_type_ascription = None;
            return Ok(lhs);
        }

        self.expected_tokens.push(TokenType::Operator);
        while let Some(op) = self.check_assoc_op() {
            // Adjust the span for interpolated LHS to point to the `$lhs` token
            // and not to what it refers to.
            let lhs_span = match self.prev_token.kind {
                TokenKind::Interpolated(..) => self.prev_token.span,
                _ => lhs.span,
            };
            let cur_op_span = self.token.span;
            let restrictions = if op.node.is_assign_like() {
                self.restrictions & Restrictions::NO_STRUCT_LITERAL
            } else {
                self.restrictions
            };
            let prec = op.node.precedence();
            if prec < min_prec {
                break;
            }
            // Check for deprecated `...` syntax
            if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq {
                self.err_dotdotdot_syntax(self.token.span);
            }

            if self.token == token::LArrow {
                self.err_larrow_operator(self.token.span);
            }

            self.bump();
            if op.node.is_comparison() {
                if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
                    return Ok(expr);
                }
            }

            // Look for JS' `===` and `!==` and recover
            if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
                && self.token.kind == token::Eq
                && self.prev_token.span.hi() == self.token.span.lo()
            {
                let sp = op.span.to(self.token.span);
                let sugg = match op.node {
                    AssocOp::Equal => "==",
                    AssocOp::NotEqual => "!=",
                    _ => unreachable!(),
                }
                .into();
                let invalid = format!("{}=", &sugg);
                self.sess.emit_err(InvalidComparisonOperator {
                    span: sp,
                    invalid: invalid.clone(),
                    sub: InvalidComparisonOperatorSub::Correctable {
                        span: sp,
                        invalid,
                        correct: sugg,
                    },
                });
                self.bump();
            }

            // Look for PHP's `<>` and recover
            if op.node == AssocOp::Less
                && self.token.kind == token::Gt
                && self.prev_token.span.hi() == self.token.span.lo()
            {
                let sp = op.span.to(self.token.span);
                self.sess.emit_err(InvalidComparisonOperator {
                    span: sp,
                    invalid: "<>".into(),
                    sub: InvalidComparisonOperatorSub::Correctable {
                        span: sp,
                        invalid: "<>".into(),
                        correct: "!=".into(),
                    },
                });
                self.bump();
            }

            // Look for C++'s `<=>` and recover
            if op.node == AssocOp::LessEqual
                && self.token.kind == token::Gt
                && self.prev_token.span.hi() == self.token.span.lo()
            {
                let sp = op.span.to(self.token.span);
                self.sess.emit_err(InvalidComparisonOperator {
                    span: sp,
                    invalid: "<=>".into(),
                    sub: InvalidComparisonOperatorSub::Spaceship(sp),
                });
                self.bump();
            }

            if self.prev_token == token::BinOp(token::Plus)
                && self.token == token::BinOp(token::Plus)
                && self.prev_token.span.between(self.token.span).is_empty()
            {
                let op_span = self.prev_token.span.to(self.token.span);
                // Eat the second `+`
                self.bump();
                lhs = self.recover_from_postfix_increment(lhs, op_span)?;
                continue;
            }

            let op = op.node;
            // Special cases:
            if op == AssocOp::As {
                lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
                continue;
            } else if op == AssocOp::Colon {
                lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?;
                continue;
            } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
                // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
                // generalise it to the Fixity::None code.
                lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?;
                break;
            }

            let fixity = op.fixity();
            let prec_adjustment = match fixity {
                Fixity::Right => 0,
                Fixity::Left => 1,
                // We currently have no non-associative operators that are not handled above by
                // the special cases. The code is here only for future convenience.
                Fixity::None => 1,
            };
            let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
                this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
            })?;

            let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
            lhs = match op {
                AssocOp::Add
                | AssocOp::Subtract
                | AssocOp::Multiply
                | AssocOp::Divide
                | AssocOp::Modulus
                | AssocOp::LAnd
                | AssocOp::LOr
                | AssocOp::BitXor
                | AssocOp::BitAnd
                | AssocOp::BitOr
                | AssocOp::ShiftLeft
                | AssocOp::ShiftRight
                | AssocOp::Equal
                | AssocOp::Less
                | AssocOp::LessEqual
                | AssocOp::NotEqual
                | AssocOp::Greater
                | AssocOp::GreaterEqual => {
                    let ast_op = op.to_ast_binop().unwrap();
                    let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
                    self.mk_expr(span, binary)
                }
                AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)),
                AssocOp::AssignOp(k) => {
                    let aop = match k {
                        token::Plus => BinOpKind::Add,
                        token::Minus => BinOpKind::Sub,
                        token::Star => BinOpKind::Mul,
                        token::Slash => BinOpKind::Div,
                        token::Percent => BinOpKind::Rem,
                        token::Caret => BinOpKind::BitXor,
                        token::And => BinOpKind::BitAnd,
                        token::Or => BinOpKind::BitOr,
                        token::Shl => BinOpKind::Shl,
                        token::Shr => BinOpKind::Shr,
                    };
                    let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
                    self.mk_expr(span, aopexpr)
                }
                AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
                    self.span_bug(span, "AssocOp should have been handled by special case")
                }
            };

            if let Fixity::None = fixity {
                break;
            }
        }
        if last_type_ascription_set {
            self.last_type_ascription = None;
        }
        Ok(lhs)
    }

    fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
        match (self.expr_is_complete(lhs), AssocOp::from_token(&self.token)) {
            // Semi-statement forms are odd:
            // See https://github.com/rust-lang/rust/issues/29071
            (true, None) => false,
            (false, _) => true, // Continue parsing the expression.
            // An exhaustive check is done in the following block, but these are checked first
            // because they *are* ambiguous but also reasonable looking incorrect syntax, so we
            // want to keep their span info to improve diagnostics in these cases in a later stage.
            (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
            (true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
            (true, Some(AssocOp::Add)) // `{ 42 } + 42`
            // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
            // `if x { a } else { b } && if y { c } else { d }`
            if !self.look_ahead(1, |t| t.is_used_keyword()) => {
                // These cases are ambiguous and can't be identified in the parser alone.
                let sp = self.sess.source_map().start_point(self.token.span);
                self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
                false
            }
            (true, Some(AssocOp::LAnd)) |
            (true, Some(AssocOp::LOr)) |
            (true, Some(AssocOp::BitOr)) => {
                // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
                // above due to #74233.
                // These cases are ambiguous and can't be identified in the parser alone.
                //
                // Bitwise AND is left out because guessing intent is hard. We can make
                // suggestions based on the assumption that double-refs are rarely intentional,
                // and closures are distinct enough that they don't get mixed up with their
                // return value.
let sp = self.sess.source_map().start_point(self.token.span); self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span); false } (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false, (true, Some(_)) => { self.error_found_expr_would_be_stmt(lhs); true } } } /// We've found an expression that would be parsed as a statement, /// but the next token implies this should be parsed as an expression. /// For example: `if let Some(x) = x { x } else { 0 } / 2`. fn error_found_expr_would_be_stmt(&self, lhs: &Expr) { self.sess.emit_err(FoundExprWouldBeStmt { span: self.token.span, token: self.token.clone(), suggestion: ExprParenthesesNeeded::surrounding(lhs.span), }); } /// Possibly translate the current token to an associative operator. /// The method does not advance the current token. /// /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively. fn check_assoc_op(&self) -> Option> { let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) { // When parsing const expressions, stop parsing when encountering `>`. ( Some( AssocOp::ShiftRight | AssocOp::Greater | AssocOp::GreaterEqual | AssocOp::AssignOp(token::BinOpToken::Shr), ), _, ) if self.restrictions.contains(Restrictions::CONST_EXPR) => { return None; } (Some(op), _) => (op, self.token.span), (None, Some((Ident { name: sym::and, span }, false))) => { self.sess.emit_err(InvalidLogicalOperator { span: self.token.span, incorrect: "and".into(), sub: InvalidLogicalOperatorSub::Conjunction(self.token.span), }); (AssocOp::LAnd, span) } (None, Some((Ident { name: sym::or, span }, false))) => { self.sess.emit_err(InvalidLogicalOperator { span: self.token.span, incorrect: "or".into(), sub: InvalidLogicalOperatorSub::Disjunction(self.token.span), }); (AssocOp::LOr, span) } _ => return None, }; Some(source_map::respan(span, op)) } /// Checks if this expression is a successfully parsed statement. fn expr_is_complete(&self, e: &Expr) -> bool { self.restrictions.contains(Restrictions::STMT_EXPR) && !classify::expr_requires_semi_to_be_stmt(e) } /// Parses `x..y`, `x..=y`, and `x..`/`x..=`. /// The other two variants are handled in `parse_prefix_range_expr` below. fn parse_range_expr( &mut self, prec: usize, lhs: P, op: AssocOp, cur_op_span: Span, ) -> PResult<'a, P> { let rhs = if self.is_at_start_of_range_notation_rhs() { Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?) } else { None }; let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span); let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span); let limits = if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed }; let range = self.mk_range(Some(lhs), rhs, limits); Ok(self.mk_expr(span, range)) } fn is_at_start_of_range_notation_rhs(&self) -> bool { if self.token.can_begin_expr() { // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. if self.token == token::OpenDelim(Delimiter::Brace) { return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); } true } else { false } } /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`. fn parse_prefix_range_expr(&mut self, attrs: Option) -> PResult<'a, P> { // Check for deprecated `...` syntax. 
if self.token == token::DotDotDot { self.err_dotdotdot_syntax(self.token.span); } debug_assert!( [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind), "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq", self.token ); let limits = match self.token.kind { token::DotDot => RangeLimits::HalfOpen, _ => RangeLimits::Closed, }; let op = AssocOp::from_token(&self.token); // FIXME: `parse_prefix_range_expr` is called when the current // token is `DotDot`, `DotDotDot`, or `DotDotEq`. If we haven't already // parsed attributes, then trying to parse them here will always fail. // We should figure out how we want attributes on range expressions to work. let attrs = self.parse_or_use_outer_attributes(attrs)?; self.collect_tokens_for_expr(attrs, |this, attrs| { let lo = this.token.span; this.bump(); let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() { // RHS must be parsed with more associativity than the dots. this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed) .map(|x| (lo.to(x.span), Some(x)))? } else { (lo, None) }; let range = this.mk_range(None, opt_end, limits); Ok(this.mk_expr_with_attrs(span, range, attrs)) }) } /// Parses a prefix-unary-operator expr. fn parse_prefix_expr(&mut self, attrs: Option) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(attrs)?; let lo = self.token.span; macro_rules! make_it { ($this:ident, $attrs:expr, |this, _| $body:expr) => { $this.collect_tokens_for_expr($attrs, |$this, attrs| { let (hi, ex) = $body?; Ok($this.mk_expr_with_attrs(lo.to(hi), ex, attrs)) }) }; } let this = self; // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() match this.token.uninterpolate().kind { token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr` token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr` token::BinOp(token::Minus) => { make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg)) } // `-expr` token::BinOp(token::Star) => { make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref)) } // `*expr` token::BinOp(token::And) | token::AndAnd => { make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo)) } token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => { let mut err = LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None }; // a block on the LHS might have been intended to be an expression instead if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) { err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp)); } else { err.remove_plus = Some(lo); } this.sess.emit_err(err); this.bump(); this.parse_prefix_expr(None) } // `+expr` // Recover from `++x`: token::BinOp(token::Plus) if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) => { let prev_is_semi = this.prev_token == token::Semi; let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span)); // Eat both `+`s. this.bump(); this.bump(); let operand_expr = this.parse_dot_or_call_expr(Default::default())?; this.recover_from_prefix_increment(operand_expr, pre_span, prev_is_semi) } token::Ident(..) if this.token.is_keyword(kw::Box) => { make_it!(this, attrs, |this, _| this.parse_box_expr(lo)) } token::Ident(..) 
if this.is_mistaken_not_ident_negation() => { make_it!(this, attrs, |this, _| this.recover_not_expr(lo)) } _ => return this.parse_dot_or_call_expr(Some(attrs)), } } fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P)> { self.bump(); let expr = self.parse_prefix_expr(None); let (span, expr) = self.interpolated_or_expr_span(expr)?; Ok((lo.to(span), expr)) } fn parse_unary_expr(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> { let (span, expr) = self.parse_prefix_expr_common(lo)?; Ok((span, self.mk_unary(op, expr))) } // Recover on `!` suggesting for bitwise negation instead. fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { self.sess.emit_err(TildeAsUnaryOperator(lo)); self.parse_unary_expr(lo, UnOp::Not) } /// Parse `box expr`. fn parse_box_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { let (span, expr) = self.parse_prefix_expr_common(lo)?; self.sess.gated_spans.gate(sym::box_syntax, span); Ok((span, ExprKind::Box(expr))) } fn is_mistaken_not_ident_negation(&self) -> bool { let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) | token::Pound => true, _ => t.is_whole_expr(), }; self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr) } /// Recover on `not expr` in favor of `!expr`. fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { // Emit the error... let negated_token = self.look_ahead(1, |t| t.clone()); let sub_diag = if negated_token.is_numeric_lit() { NotAsNegationOperatorSub::SuggestNotBitwise } else if negated_token.is_bool_lit() { NotAsNegationOperatorSub::SuggestNotLogical } else { NotAsNegationOperatorSub::SuggestNotDefault }; self.sess.emit_err(NotAsNegationOperator { negated: negated_token.span, negated_desc: super::token_descr(&negated_token), // Span the `not` plus trailing whitespace to avoid // trailing whitespace after the `!` in our suggestion sub: sub_diag( self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)), ), }); // ...and recover! self.parse_unary_expr(lo, UnOp::Not) } /// Returns the span of expr, if it was not interpolated or the span of the interpolated token. fn interpolated_or_expr_span( &self, expr: PResult<'a, P>, ) -> PResult<'a, (Span, P)> { expr.map(|e| { ( match self.prev_token.kind { TokenKind::Interpolated(..) => self.prev_token.span, _ => e.span, }, e, ) }) } fn parse_assoc_op_cast( &mut self, lhs: P, lhs_span: Span, expr_kind: fn(P, P) -> ExprKind, ) -> PResult<'a, P> { let mk_expr = |this: &mut Self, lhs: P, rhs: P| { this.mk_expr(this.mk_expr_sp(&lhs, lhs_span, rhs.span), expr_kind(lhs, rhs)) }; // Save the state of the parser before parsing type normally, in case there is a // LessThan comparison after this cast. let parser_snapshot_before_type = self.clone(); let cast_expr = match self.parse_as_cast_ty() { Ok(rhs) => mk_expr(self, lhs, rhs), Err(type_err) => { // Rewind to before attempting to parse the type with generics, to recover // from situations like `x as usize < y` in which we first tried to parse // `usize < y` as a type with generic arguments. let parser_snapshot_after_type = mem::replace(self, parser_snapshot_before_type); // Check for typo of `'a: loop { break 'a }` with a missing `'`. 
match (&lhs.kind, &self.token.kind) { ( // `foo: ` ExprKind::Path(None, ast::Path { segments, .. }), TokenKind::Ident(kw::For | kw::Loop | kw::While, false), ) if segments.len() == 1 => { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { ident: Ident::from_str_and_span( &format!("'{}", segments[0].ident), segments[0].ident.span, ), }; match self.parse_labeled_expr(label, false) { Ok(expr) => { type_err.cancel(); self.sess.emit_err(MalformedLoopLabel { span: label.ident.span, correct_label: label.ident, }); return Ok(expr); } Err(err) => { err.cancel(); self.restore_snapshot(snapshot); } } } _ => {} } match self.parse_path(PathStyle::Expr) { Ok(path) => { let span_after_type = parser_snapshot_after_type.token.span; let expr = mk_expr( self, lhs, self.mk_ty(path.span, TyKind::Path(None, path.clone())), ); let args_span = self.look_ahead(1, |t| t.span).to(span_after_type); let suggestion = ComparisonOrShiftInterpretedAsGenericSugg { left: expr.span.shrink_to_lo(), right: expr.span.shrink_to_hi(), }; match self.token.kind { token::Lt => self.sess.emit_err(ComparisonInterpretedAsGeneric { comparison: self.token.span, r#type: path, args: args_span, suggestion, }), token::BinOp(token::Shl) => { self.sess.emit_err(ShiftInterpretedAsGeneric { shift: self.token.span, r#type: path, args: args_span, suggestion, }) } _ => { // We can end up here even without `<` being the next token, for // example because `parse_ty_no_plus` returns `Err` on keywords, // but `parse_path` returns `Ok` on them due to error recovery. // Return original error and parser state. *self = parser_snapshot_after_type; return Err(type_err); } }; // Successfully parsed the type path leaving a `<` yet to parse. type_err.cancel(); // Keep `x as usize` as an expression in AST and continue parsing. expr } Err(path_err) => { // Couldn't parse as a path, return original error and parser state. path_err.cancel(); *self = parser_snapshot_after_type; return Err(type_err); } } } }; self.parse_and_disallow_postfix_after_cast(cast_expr) } /// Parses a postfix operators such as `.`, `?`, or index (`[]`) after a cast, /// then emits an error and returns the newly parsed tree. /// The resulting parse tree for `&x as T[0]` has a precedence of `((&x) as T)[0]`. fn parse_and_disallow_postfix_after_cast( &mut self, cast_expr: P, ) -> PResult<'a, P> { let span = cast_expr.span; let (cast_kind, maybe_ascription_span) = if let ExprKind::Type(ascripted_expr, _) = &cast_expr.kind { ("type ascription", Some(ascripted_expr.span.shrink_to_hi().with_hi(span.hi()))) } else { ("cast", None) }; // Save the memory location of expr before parsing any following postfix operators. // This will be compared with the memory location of the output expression. // If they different we can assume we parsed another expression because the existing expression is not reallocated. let addr_before = &*cast_expr as *const _ as usize; let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?; let changed = addr_before != &*with_postfix as *const _ as usize; // Check if an illegal postfix operator has been added after the cast. // If the resulting expression is not a cast, or has a different memory location, it is an illegal postfix operator. 
if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) || changed { let msg = format!( "{cast_kind} cannot be followed by {}", match with_postfix.kind { ExprKind::Index(_, _) => "indexing", ExprKind::Try(_) => "`?`", ExprKind::Field(_, _) => "a field access", ExprKind::MethodCall(_, _, _, _) => "a method call", ExprKind::Call(_, _) => "a function call", ExprKind::Await(_) => "`.await`", ExprKind::Err => return Ok(with_postfix), _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"), } ); let mut err = self.struct_span_err(span, &msg); let suggest_parens = |err: &mut Diagnostic| { let suggestions = vec![ (span.shrink_to_lo(), "(".to_string()), (span.shrink_to_hi(), ")".to_string()), ]; err.multipart_suggestion( "try surrounding the expression in parentheses", suggestions, Applicability::MachineApplicable, ); }; // If type ascription is "likely an error", the user will already be getting a useful // help message, and doesn't need a second. if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) { self.maybe_annotate_with_ascription(&mut err, false); } else if let Some(ascription_span) = maybe_ascription_span { let is_nightly = self.sess.unstable_features.is_nightly_build(); if is_nightly { suggest_parens(&mut err); } err.span_suggestion( ascription_span, &format!( "{}remove the type ascription", if is_nightly { "alternatively, " } else { "" } ), "", if is_nightly { Applicability::MaybeIncorrect } else { Applicability::MachineApplicable }, ); } else { suggest_parens(&mut err); } err.emit(); }; Ok(with_postfix) } fn parse_assoc_op_ascribe(&mut self, lhs: P, lhs_span: Span) -> PResult<'a, P> { let maybe_path = self.could_ascription_be_path(&lhs.kind); self.last_type_ascription = Some((self.prev_token.span, maybe_path)); let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?; self.sess.gated_spans.gate(sym::type_ascription, lhs.span); Ok(lhs) } /// Parse `& mut? ` or `& raw [ const | mut ] `. fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { self.expect_and()?; let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon); let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below. let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo); let expr = self.parse_prefix_expr(None); let (hi, expr) = self.interpolated_or_expr_span(expr)?; let span = lo.to(hi); if let Some(lt) = lifetime { self.error_remove_borrow_lifetime(span, lt.ident.span); } Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) } fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) { self.sess.emit_err(LifetimeInBorrowExpression { span, lifetime_span: lt_span }); } /// Parse `mut?` or `raw [ const | mut ]`. fn parse_borrow_modifiers(&mut self, lo: Span) -> (ast::BorrowKind, ast::Mutability) { if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) { // `raw [ const | mut ]`. let found_raw = self.eat_keyword(kw::Raw); assert!(found_raw); let mutability = self.parse_const_or_mut().unwrap(); self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span)); (ast::BorrowKind::Raw, mutability) } else { // `mut?` (ast::BorrowKind::Ref, self.parse_mutability()) } } /// Parses `a.b` or `a(13)` or `a[4]` or just `a`. 
fn parse_dot_or_call_expr(&mut self, attrs: Option) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(attrs)?; self.collect_tokens_for_expr(attrs, |this, attrs| { let base = this.parse_bottom_expr(); let (span, base) = this.interpolated_or_expr_span(base)?; this.parse_dot_or_call_expr_with(base, span, attrs) }) } pub(super) fn parse_dot_or_call_expr_with( &mut self, e0: P, lo: Span, mut attrs: ast::AttrVec, ) -> PResult<'a, P> { // Stitch the list of outer attributes onto the return value. // A little bit ugly, but the best way given the current code // structure let res = self.parse_dot_or_call_expr_with_(e0, lo); if attrs.is_empty() { res } else { res.map(|expr| { expr.map(|mut expr| { attrs.extend(expr.attrs); expr.attrs = attrs; expr }) }) } } fn parse_dot_or_call_expr_with_(&mut self, mut e: P, lo: Span) -> PResult<'a, P> { loop { let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { // we are using noexpect here because we don't expect a `?` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Question) } else { self.eat(&token::Question) }; if has_question { // `expr?` e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e)); continue; } let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { // we are using noexpect here because we don't expect a `.` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Dot) } else { self.eat(&token::Dot) }; if has_dot { // expr.f e = self.parse_dot_suffix_expr(lo, e)?; continue; } if self.expr_is_complete(&e) { return Ok(e); } e = match self.token.kind { token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e), token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?, _ => return Ok(e), } } } fn look_ahead_type_ascription_as_field(&mut self) -> bool { self.look_ahead(1, |t| t.is_ident()) && self.look_ahead(2, |t| t == &token::Colon) && self.look_ahead(3, |t| t.can_begin_expr()) } fn parse_dot_suffix_expr(&mut self, lo: Span, base: P) -> PResult<'a, P> { match self.token.uninterpolate().kind { token::Ident(..) => self.parse_dot_suffix(base, lo), token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => { Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None)) } token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => { Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix)) } _ => { self.error_unexpected_after_dot(); Ok(base) } } } fn error_unexpected_after_dot(&self) { // FIXME Could factor this out into non_fatal_unexpected or something. let actual = pprust::token_to_string(&self.token); self.struct_span_err(self.token.span, &format!("unexpected token: `{actual}`")).emit(); } // We need an identifier or integer, but the next token is a float. // Break the float into components to extract the identifier or integer. // FIXME: With current `TokenCursor` it's hard to break tokens into more than 2 // parts unless those parts are processed immediately. `TokenCursor` should either // support pushing "future tokens" (would be also helpful to `break_and_eat`), or // we should break everything including floats into more basic proc-macro style // tokens in the lexer (probably preferable). 
fn parse_tuple_field_access_expr_float( &mut self, lo: Span, base: P, float: Symbol, suffix: Option, ) -> P { #[derive(Debug)] enum FloatComponent { IdentLike(String), Punct(char), } use FloatComponent::*; let float_str = float.as_str(); let mut components = Vec::new(); let mut ident_like = String::new(); for c in float_str.chars() { if c == '_' || c.is_ascii_alphanumeric() { ident_like.push(c); } else if matches!(c, '.' | '+' | '-') { if !ident_like.is_empty() { components.push(IdentLike(mem::take(&mut ident_like))); } components.push(Punct(c)); } else { panic!("unexpected character in a float token: {:?}", c) } } if !ident_like.is_empty() { components.push(IdentLike(ident_like)); } // With proc macros the span can refer to anything, the source may be too short, // or too long, or non-ASCII. It only makes sense to break our span into components // if its underlying text is identical to our float literal. let span = self.token.span; let can_take_span_apart = || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref(); match &*components { // 1e2 [IdentLike(i)] => { self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None) } // 1. [IdentLike(i), Punct('.')] => { let (ident_span, dot_span) = if can_take_span_apart() { let (span, ident_len) = (span.data(), BytePos::from_usize(i.len())); let ident_span = span.with_hi(span.lo + ident_len); let dot_span = span.with_lo(span.lo + ident_len); (ident_span, dot_span) } else { (span, span) }; assert!(suffix.is_none()); let symbol = Symbol::intern(&i); self.token = Token::new(token::Ident(symbol, false), ident_span); let next_token = (Token::new(token::Dot, dot_span), self.token_spacing); self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token)) } // 1.2 | 1.2e3 [IdentLike(i1), Punct('.'), IdentLike(i2)] => { let (ident1_span, dot_span, ident2_span) = if can_take_span_apart() { let (span, ident1_len) = (span.data(), BytePos::from_usize(i1.len())); let ident1_span = span.with_hi(span.lo + ident1_len); let dot_span = span .with_lo(span.lo + ident1_len) .with_hi(span.lo + ident1_len + BytePos(1)); let ident2_span = self.token.span.with_lo(span.lo + ident1_len + BytePos(1)); (ident1_span, dot_span, ident2_span) } else { (span, span, span) }; let symbol1 = Symbol::intern(&i1); self.token = Token::new(token::Ident(symbol1, false), ident1_span); // This needs to be `Spacing::Alone` to prevent regressions. // See issue #76399 and PR #76285 for more details let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone); let base1 = self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1)); let symbol2 = Symbol::intern(&i2); let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); self.bump_with((next_token2, self.token_spacing)); // `.` self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None) } // 1e+ | 1e- (recovered) [IdentLike(_), Punct('+' | '-')] | // 1e+2 | 1e-2 [IdentLike(_), Punct('+' | '-'), IdentLike(_)] | // 1.2e+ | 1.2e- [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-')] | // 1.2e+3 | 1.2e-3 [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => { // See the FIXME about `TokenCursor` above. 
self.error_unexpected_after_dot(); base } _ => panic!("unexpected components in a float token: {:?}", components), } } fn parse_tuple_field_access_expr( &mut self, lo: Span, base: P, field: Symbol, suffix: Option, next_token: Option<(Token, Spacing)>, ) -> P { match next_token { Some(next_token) => self.bump_with(next_token), None => self.bump(), } let span = self.prev_token.span; let field = ExprKind::Field(base, Ident::new(field, span)); if let Some(suffix) = suffix { self.expect_no_tuple_index_suffix(span, suffix); } self.mk_expr(lo.to(span), field) } /// Parse a function call expression, `expr(...)`. fn parse_fn_call_expr(&mut self, lo: Span, fun: P) -> P { let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) && self.look_ahead_type_ascription_as_field() { Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) } else { None }; let open_paren = self.token.span; let mut seq = self .parse_paren_expr_seq() .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args))); if let Some(expr) = self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot) { return expr; } self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq) } /// If we encounter a parser state that looks like the user has written a `struct` literal with /// parentheses instead of braces, recover the parser state and provide suggestions. #[instrument(skip(self, seq, snapshot), level = "trace")] fn maybe_recover_struct_lit_bad_delims( &mut self, lo: Span, open_paren: Span, seq: &mut PResult<'a, P>, snapshot: Option<(SnapshotParser<'a>, ExprKind)>, ) -> Option> { match (seq.as_mut(), snapshot) { (Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => { snapshot.bump(); // `(` match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) { Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) => { // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. self.restore_snapshot(snapshot); let close_paren = self.prev_token.span; let span = lo.to(self.prev_token.span); if !fields.is_empty() { let mut replacement_err = ParenthesesWithStructFields { span, r#type: path, braces_for_struct: BracesForStructLiteral { first: open_paren, second: close_paren, }, no_fields_for_fn: NoFieldsForFnCall { fields: fields .into_iter() .map(|field| field.span.until(field.expr.span)) .collect(), }, } .into_diagnostic(&self.sess.span_diagnostic); replacement_err.emit(); let old_err = mem::replace(err, replacement_err); old_err.cancel(); } else { err.emit(); } return Some(self.mk_expr_err(span)); } Ok(_) => {} Err(mut err) => { err.emit(); } } } _ => {} } None } /// Parse an indexing expression `expr[...]`. fn parse_index_expr(&mut self, lo: Span, base: P) -> PResult<'a, P> { let prev_span = self.prev_token.span; let open_delim_span = self.token.span; self.bump(); // `[` let index = self.parse_expr()?; self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?; self.expect(&token::CloseDelim(Delimiter::Bracket))?; Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index))) } /// Assuming we have just parsed `.`, continue parsing into an expression. 
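    /// For example (illustrative cases only): `x.await`, `x.field`, and
    /// `x.method::<T>(args)` are handled here, while tuple indices such as
    /// `x.0` take the literal paths in `parse_dot_suffix_expr` instead.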
fn parse_dot_suffix(&mut self, self_arg: P, lo: Span) -> PResult<'a, P> { if self.token.uninterpolated_span().rust_2018() && self.eat_keyword(kw::Await) { return Ok(self.mk_await_expr(self_arg, lo)); } let fn_span_lo = self.token.span; let mut segment = self.parse_path_segment(PathStyle::Expr, None)?; self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]); self.check_turbofish_missing_angle_brackets(&mut segment); if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { // Method call `expr.f()` let args = self.parse_paren_expr_seq()?; let fn_span = fn_span_lo.to(self.prev_token.span); let span = lo.to(self.prev_token.span); Ok(self.mk_expr(span, ExprKind::MethodCall(segment, self_arg, args, fn_span))) } else { // Field access `expr.f` if let Some(args) = segment.args { self.sess.emit_err(FieldExpressionWithGeneric(args.span())); } let span = lo.to(self.prev_token.span); Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident))) } } /// At the bottom (top?) of the precedence hierarchy, /// Parses things like parenthesized exprs, macros, `return`, etc. /// /// N.B., this does not parse outer attributes, and is private because it only works /// correctly if called from `parse_dot_or_call_expr()`. fn parse_bottom_expr(&mut self) -> PResult<'a, P> { maybe_recover_from_interpolated_ty_qpath!(self, true); maybe_whole_expr!(self); // Outer attributes are already parsed and will be // added to the return value after the fact. // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. let lo = self.token.span; if let token::Literal(_) = self.token.kind { // This match arm is a special-case of the `_` match arm below and // could be removed without changing functionality, but it's faster // to have it here, especially for programs with large constants. self.parse_lit_expr() } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { self.parse_tuple_parens_expr() } else if self.check(&token::OpenDelim(Delimiter::Brace)) { self.parse_block_expr(None, lo, BlockCheckMode::Default) } else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) { self.parse_closure_expr().map_err(|mut err| { // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }` // then suggest parens around the lhs. 
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&lo) { err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp)); } err }) } else if self.check(&token::OpenDelim(Delimiter::Bracket)) { self.parse_array_or_repeat_expr(Delimiter::Bracket) } else if self.check_path() { self.parse_path_start_expr() } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) { self.parse_closure_expr() } else if self.eat_keyword(kw::If) { self.parse_if_expr() } else if self.check_keyword(kw::For) { if self.choose_generics_over_qpath(1) { self.parse_closure_expr() } else { assert!(self.eat_keyword(kw::For)); self.parse_for_expr(None, self.prev_token.span) } } else if self.eat_keyword(kw::While) { self.parse_while_expr(None, self.prev_token.span) } else if let Some(label) = self.eat_label() { self.parse_labeled_expr(label, true) } else if self.eat_keyword(kw::Loop) { let sp = self.prev_token.span; self.parse_loop_expr(None, self.prev_token.span).map_err(|mut err| { err.span_label(sp, "while parsing this `loop` expression"); err }) } else if self.eat_keyword(kw::Continue) { let kind = ExprKind::Continue(self.eat_label()); Ok(self.mk_expr(lo.to(self.prev_token.span), kind)) } else if self.eat_keyword(kw::Match) { let match_sp = self.prev_token.span; self.parse_match_expr().map_err(|mut err| { err.span_label(match_sp, "while parsing this `match` expression"); err }) } else if self.eat_keyword(kw::Unsafe) { let sp = self.prev_token.span; self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err( |mut err| { err.span_label(sp, "while parsing this `unsafe` expression"); err }, ) } else if self.check_inline_const(0) { self.parse_const_block(lo.to(self.token.span), false) } else if self.is_do_catch_block() { self.recover_do_catch() } else if self.is_try_block() { self.expect_keyword(kw::Try)?; self.parse_try_block(lo) } else if self.eat_keyword(kw::Return) { self.parse_return_expr() } else if self.eat_keyword(kw::Break) { self.parse_break_expr() } else if self.eat_keyword(kw::Yield) { self.parse_yield_expr() } else if self.is_do_yeet() { self.parse_yeet_expr() } else if self.check_keyword(kw::Let) { self.parse_let_expr() } else if self.eat_keyword(kw::Underscore) { Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore)) } else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) { // Don't complain about bare semicolons after unclosed braces // recovery in order to keep the error count down. Fixing the // delimiters will possibly also fix the bare semicolon found in // expression context. For example, silence the following error: // // error: expected expression, found `;` // --> file.rs:2:13 // | // 2 | foo(bar(; // | ^ expected expression self.bump(); Ok(self.mk_expr_err(self.token.span)) } else if self.token.uninterpolated_span().rust_2018() { // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly. if self.check_keyword(kw::Async) { if self.is_async_block() { // Check for `async {` and `async move {`. 
self.parse_async_block() } else { self.parse_closure_expr() } } else if self.eat_keyword(kw::Await) { self.recover_incorrect_await_syntax(lo, self.prev_token.span) } else { self.parse_lit_expr() } } else { self.parse_lit_expr() } } fn parse_lit_expr(&mut self) -> PResult<'a, P> { let lo = self.token.span; match self.parse_opt_lit() { Some(literal) => { let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal)); self.maybe_recover_from_bad_qpath(expr) } None => self.try_macro_suggestion(), } } fn parse_tuple_parens_expr(&mut self) -> PResult<'a, P> { let lo = self.token.span; self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let (es, trailing_comma) = match self.parse_seq_to_end( &token::CloseDelim(Delimiter::Parenthesis), SeqSep::trailing_allowed(token::Comma), |p| p.parse_expr_catch_underscore(), ) { Ok(x) => x, Err(err) => { return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err))); } }; let kind = if es.len() == 1 && !trailing_comma { // `(e)` is parenthesized `e`. ExprKind::Paren(es.into_iter().next().unwrap()) } else { // `(e,)` is a tuple with only one field, `e`. ExprKind::Tup(es) }; let expr = self.mk_expr(lo.to(self.prev_token.span), kind); self.maybe_recover_from_bad_qpath(expr) } fn parse_array_or_repeat_expr(&mut self, close_delim: Delimiter) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `[` or other open delim let close = &token::CloseDelim(close_delim); let kind = if self.eat(close) { // Empty vector ExprKind::Array(Vec::new()) } else { // Non-empty vector let first_expr = self.parse_expr()?; if self.eat(&token::Semi) { // Repeating array syntax: `[ 0; 512 ]` let count = self.parse_anon_const_expr()?; self.expect(close)?; ExprKind::Repeat(first_expr, count) } else if self.eat(&token::Comma) { // Vector with two or more elements. let sep = SeqSep::trailing_allowed(token::Comma); let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?; let mut exprs = vec![first_expr]; exprs.extend(remaining_exprs); ExprKind::Array(exprs) } else { // Vector with one element self.expect(close)?; ExprKind::Array(vec![first_expr]) } }; let expr = self.mk_expr(lo.to(self.prev_token.span), kind); self.maybe_recover_from_bad_qpath(expr) } fn parse_path_start_expr(&mut self) -> PResult<'a, P> { let (qself, path) = if self.eat_lt() { let (qself, path) = self.parse_qpath(PathStyle::Expr)?; (Some(qself), path) } else { (None, self.parse_path(PathStyle::Expr)?) }; // `!`, as an operator, is prefix, so we know this isn't that. let (span, kind) = if self.eat(&token::Not) { // MACRO INVOCATION expression if qself.is_some() { self.sess.emit_err(MacroInvocationWithQualifiedPath(path.span)); } let lo = path.span; let mac = P(MacCall { path, args: self.parse_mac_args()?, prior_type_ascription: self.last_type_ascription, }); (lo.to(self.prev_token.span), ExprKind::MacCall(mac)) } else if self.check(&token::OpenDelim(Delimiter::Brace)) && let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path) { if qself.is_some() { self.sess.gated_spans.gate(sym::more_qualified_paths, path.span); } return expr; } else { (path.span, ExprKind::Path(qself, path)) }; let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `'label: $expr`. The label is already parsed. 
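    /// For example (illustrative only), `'outer: loop { .. }`,
    /// `'l: while cond { .. }`, and `'b: { .. }` all reach this point with the
    /// label already consumed.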
fn parse_labeled_expr( &mut self, label_: Label, mut consume_colon: bool, ) -> PResult<'a, P> { let lo = label_.ident.span; let label = Some(label_); let ate_colon = self.eat(&token::Colon); let expr = if self.eat_keyword(kw::While) { self.parse_while_expr(label, lo) } else if self.eat_keyword(kw::For) { self.parse_for_expr(label, lo) } else if self.eat_keyword(kw::Loop) { self.parse_loop_expr(label, lo) } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() { self.parse_block_expr(label, lo, BlockCheckMode::Default) } else if !ate_colon && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma) || self.token.is_op()) { let lit = self.recover_unclosed_char(label_.ident, |self_| { self_.sess.create_err(UnexpectedTokenAfterLabel { span: self_.token.span, remove_label: None, enclose_in_block: None, }) }); consume_colon = false; Ok(self.mk_expr(lo, ExprKind::Lit(lit))) } else if !ate_colon && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt)) { // We're probably inside of a `Path<'a>` that needs a turbofish self.sess.emit_err(UnexpectedTokenAfterLabel { span: self.token.span, remove_label: None, enclose_in_block: None, }); consume_colon = false; Ok(self.mk_expr_err(lo)) } else { let mut err = UnexpectedTokenAfterLabel { span: self.token.span, remove_label: None, enclose_in_block: None, }; // Continue as an expression in an effort to recover on `'label: non_block_expr`. let expr = self.parse_expr().map(|expr| { let span = expr.span; let found_labeled_breaks = { struct FindLabeledBreaksVisitor(bool); impl<'ast> Visitor<'ast> for FindLabeledBreaksVisitor { fn visit_expr_post(&mut self, ex: &'ast Expr) { if let ExprKind::Break(Some(_label), _) = ex.kind { self.0 = true; } } } let mut vis = FindLabeledBreaksVisitor(false); vis.visit_expr(&expr); vis.0 }; // Suggestion involves adding a (as of time of writing this, unstable) labeled block. // // If there are no breaks that may use this label, suggest removing the label and // recover to the unmodified expression. if !found_labeled_breaks { err.remove_label = Some(lo.until(span)); return expr; } err.enclose_in_block = Some(UnexpectedTokenAfterLabelSugg { left: span.shrink_to_lo(), right: span.shrink_to_hi(), }); // Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`. 
let stmt = self.mk_stmt(span, StmtKind::Expr(expr)); let blk = self.mk_block(vec![stmt], BlockCheckMode::Default, span); self.mk_expr(span, ExprKind::Block(blk, label)) }); self.sess.emit_err(err); expr }?; if !ate_colon && consume_colon { self.sess.emit_err(RequireColonAfterLabeledExpression { span: expr.span, label: lo, label_end: lo.shrink_to_hi(), }); } Ok(expr) } /// Emit an error when a char is parsed as a lifetime because of a missing quote pub(super) fn recover_unclosed_char( &mut self, lifetime: Ident, err: impl FnOnce(&mut Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>, ) -> ast::Lit { if let Some(mut diag) = self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar) { diag.span_suggestion_verbose( lifetime.span.shrink_to_hi(), "add `'` to close the char literal", "'", Applicability::MaybeIncorrect, ) .emit(); } else { err(self) .span_suggestion_verbose( lifetime.span.shrink_to_hi(), "add `'` to close the char literal", "'", Applicability::MaybeIncorrect, ) .emit(); } ast::Lit { token_lit: token::Lit::new(token::LitKind::Char, lifetime.name, None), kind: ast::LitKind::Char(lifetime.name.as_str().chars().next().unwrap_or('_')), span: lifetime.span, } } /// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead. fn recover_do_catch(&mut self) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `do` self.bump(); // `catch` let span = lo.to(self.prev_token.span); self.sess.emit_err(DoCatchSyntaxRemoved { span }); self.parse_try_block(lo) } /// Parse an expression if the token can begin one. fn parse_expr_opt(&mut self) -> PResult<'a, Option>> { Ok(if self.token.can_begin_expr() { Some(self.parse_expr()?) } else { None }) } /// Parse `"return" expr?`. fn parse_return_expr(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Ret(self.parse_expr_opt()?); let expr = self.mk_expr(lo.to(self.prev_token.span), kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"do" "yeet" expr?`. fn parse_yeet_expr(&mut self) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `do` self.bump(); // `yeet` let kind = ExprKind::Yeet(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); self.sess.gated_spans.gate(sym::yeet_expr, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten. /// If the label is followed immediately by a `:` token, the label and `:` are /// parsed as part of the expression (i.e. a labeled loop). The language team has /// decided in #87026 to require parentheses as a visual aid to avoid confusion if /// the break expression of an unlabeled break is a labeled loop (as in /// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value /// expression only gets a warning for compatibility reasons; and a labeled break /// with a labeled loop does not even get a warning because there is no ambiguity. 
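    ///
    /// For example (illustrative only): `break`, `break 'outer`, `break 42`,
    /// and `break 'outer 42` are accepted as-is, while `break 'lbl: loop {}`
    /// is recovered with a suggestion to write `break ('lbl: loop {})`.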
fn parse_break_expr(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let mut label = self.eat_label(); let kind = if label.is_some() && self.token == token::Colon { // The value expression can be a labeled loop, see issue #86948, e.g.: // `loop { break 'label: loop { break 'label 42; }; }` let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?; self.sess.emit_err(LabeledLoopInBreak { span: lexpr.span, sub: WrapExpressionInParentheses { left: lexpr.span.shrink_to_lo(), right: lexpr.span.shrink_to_hi(), }, }); Some(lexpr) } else if self.token != token::OpenDelim(Delimiter::Brace) || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) { let expr = self.parse_expr_opt()?; if let Some(ref expr) = expr { if label.is_some() && matches!( expr.kind, ExprKind::While(_, _, None) | ExprKind::ForLoop(_, _, _, None) | ExprKind::Loop(_, None) | ExprKind::Block(_, None) ) { self.sess.buffer_lint_with_diagnostic( BREAK_WITH_LABEL_AND_LOOP, lo.to(expr.span), ast::CRATE_NODE_ID, "this labeled break expression is easy to confuse with an unlabeled break with a labeled value expression", BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span), ); } } expr } else { None }; let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind)); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"yield" expr?`. fn parse_yield_expr(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Yield(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); self.sess.gated_spans.gate(sym::generators, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Returns a string literal if the next token is a string literal. /// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind, /// and returns `None` if the next token is not literal at all. pub fn parse_str_lit(&mut self) -> Result> { match self.parse_opt_lit() { Some(lit) => match lit.kind { ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit { style, symbol: lit.token_lit.symbol, suffix: lit.token_lit.suffix, span: lit.span, symbol_unescaped, }), _ => Err(Some(lit)), }, None => Err(None), } } pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> { self.parse_opt_lit().ok_or(()).or_else(|()| { if let token::Interpolated(inner) = &self.token.kind { let expr = match inner.as_ref() { token::NtExpr(expr) => Some(expr), token::NtLiteral(expr) => Some(expr), _ => None, }; if let Some(expr) = expr { if matches!(expr.kind, ExprKind::Err) { let mut err = InvalidInterpolatedExpression { span: self.token.span } .into_diagnostic(&self.sess.span_diagnostic); err.downgrade_to_delayed_bug(); return Err(err); } } } let token = self.token.clone(); let err = |self_: &mut Self| { let msg = format!("unexpected token: {}", super::token_descr(&token)); self_.struct_span_err(token.span, &msg) }; // On an error path, eagerly consider a lifetime to be an unclosed character lit if self.token.is_lifetime() { let lt = self.expect_lifetime(); Ok(self.recover_unclosed_char(lt.ident, err)) } else { Err(err(self)) } }) } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. pub(super) fn parse_opt_lit(&mut self) -> Option { let mut recovered = None; if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where // dot would follow an optional literal, so we do this unconditionally. 
            recovered = self.look_ahead(1, |next_token| {
                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                    next_token.kind
                {
                    if self.token.span.hi() == next_token.span.lo() {
                        let s = String::from("0.") + symbol.as_str();
                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
                    }
                }
                None
            });
            if let Some(token) = &recovered {
                self.bump();
                self.sess.emit_err(FloatLiteralRequiresIntegerPart {
                    span: token.span,
                    correct: pprust::token_to_string(token).into_owned(),
                });
            }
        }

        let token = recovered.as_ref().unwrap_or(&self.token);
        match Lit::from_token(token) {
            Ok(lit) => {
                self.bump();
                Some(lit)
            }
            Err(LitError::NotLiteral) => None,
            Err(err) => {
                let span = token.span;
                let token::Literal(lit) = token.kind else {
                    unreachable!();
                };
                self.bump();
                self.report_lit_error(err, lit, span);
                // Pack possible quotes and prefixes from the original literal into
                // the error literal's symbol so they can be pretty-printed faithfully.
                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                let symbol = Symbol::intern(&suffixless_lit.to_string());
                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
            }
        }
    }

    fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
        // Checks if `s` looks like i32 or u1234 etc.
        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
            s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
        }

        // Try to lowercase the prefix if it's a valid base prefix.
        fn fix_base_capitalisation(s: &str) -> Option<String> {
            if let Some(stripped) = s.strip_prefix('B') {
                Some(format!("0b{stripped}"))
            } else if let Some(stripped) = s.strip_prefix('O') {
                Some(format!("0o{stripped}"))
            } else if let Some(stripped) = s.strip_prefix('X') {
                Some(format!("0x{stripped}"))
            } else {
                None
            }
        }

        let token::Lit { kind, suffix, .. } = lit;
        match err {
            // `NotLiteral` is not an error by itself, so we don't report
            // it and give the parser opportunity to try something else.
            LitError::NotLiteral => {}
            // `LexerError` *is* an error, but it was already reported
            // by lexer, so here we don't report it the second time.
            LitError::LexerError => {}
            LitError::InvalidSuffix => {
                if let Some(suffix) = suffix {
                    self.sess.emit_err(InvalidLiteralSuffix {
                        span,
                        kind: format!("{}", kind.descr()),
                        suffix,
                    });
                }
            }
            LitError::InvalidIntSuffix => {
                let suf = suffix.expect("suffix error with no suffix");
                let suf = suf.as_str();
                if looks_like_width_suffix(&['i', 'u'], &suf) {
                    // If it looks like a width, try to be helpful.
                    self.sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
                } else if let Some(fixed) = fix_base_capitalisation(suf) {
                    self.sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
                } else {
                    self.sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
                }
            }
            LitError::InvalidFloatSuffix => {
                let suf = suffix.expect("suffix error with no suffix");
                let suf = suf.as_str();
                if looks_like_width_suffix(&['f'], suf) {
                    // If it looks like a width, try to be helpful.
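                    // (Illustrative example: `2.0f16` is reported as the invalid float
                    // width "16" rather than as a generic invalid-suffix error.)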
                    self.sess
                        .emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
                } else {
                    self.sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
                }
            }
            LitError::NonDecimalFloat(base) => {
                match base {
                    16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
                    8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
                    2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
                    _ => unreachable!(),
                };
            }
            LitError::IntTooLarge => {
                self.sess.emit_err(IntLiteralTooLarge { span });
            }
        }
    }

    pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
        if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
            // #59553: warn instead of reject out of hand to allow the fix to percolate
            // through the ecosystem when people fix their macros
            self.sess.emit_warning(InvalidLiteralSuffixOnTupleIndex {
                span,
                suffix,
                exception: Some(()),
            });
        } else {
            self.sess.emit_err(InvalidLiteralSuffixOnTupleIndex { span, suffix, exception: None });
        }
    }

    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
    /// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
    pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
        maybe_whole_expr!(self);

        let lo = self.token.span;
        let minus_present = self.eat(&token::BinOp(token::Minus));
        let lit = self.parse_lit()?;
        let expr = self.mk_expr(lit.span, ExprKind::Lit(lit));

        if minus_present {
            Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
        } else {
            Ok(expr)
        }
    }

    fn is_array_like_block(&mut self) -> bool {
        self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
            && self.look_ahead(2, |t| t == &token::Comma)
            && self.look_ahead(3, |t| t.can_begin_expr())
    }

    /// Emits a suggestion if it looks like the user meant an array but
    /// accidentally used braces, causing the code to be interpreted as a block
    /// expression.
    fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
        let mut snapshot = self.create_snapshot_for_diagnostic();
        match snapshot.parse_array_or_repeat_expr(Delimiter::Brace) {
            Ok(arr) => {
                self.sess.emit_err(ArrayBracketsInsteadOfSpaces {
                    span: arr.span,
                    sub: ArrayBracketsInsteadOfSpacesSugg {
                        left: lo,
                        right: snapshot.prev_token.span,
                    },
                });

                self.restore_snapshot(snapshot);
                Some(self.mk_expr_err(arr.span))
            }
            Err(e) => {
                e.cancel();
                None
            }
        }
    }

    fn suggest_missing_semicolon_before_array(
        &self,
        prev_span: Span,
        open_delim_span: Span,
    ) -> PResult<'a, ()> {
        if self.token.kind == token::Comma {
            if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
                return Ok(());
            }
            let mut snapshot = self.create_snapshot_for_diagnostic();
            snapshot.bump();
            match snapshot.parse_seq_to_before_end(
                &token::CloseDelim(Delimiter::Bracket),
                SeqSep::trailing_allowed(token::Comma),
                |p| p.parse_expr(),
            ) {
                Ok(_)
                    // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
                    // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`.
                    // This is because the `token.kind` of the close delim is treated as the same as
                    // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
                    // Therefore, `token.kind` should not be compared here.
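                    //
                    // Illustrative scenario (an assumption, not taken from the test suite):
                    // an expression such as `let x = 1` followed on the next line by
                    // `[1, 2, 3].len();` is parsed as the index expression `1[1, 2, 3]`;
                    // hitting the `,` inside the brackets lets us suggest the missing `;` instead.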
                    if snapshot
                        .span_to_snippet(snapshot.token.span)
                        .map_or(false, |snippet| snippet == "]") =>
                {
                    return Err(MissingSemicolonBeforeArray {
                        open_delim: open_delim_span,
                        semicolon: prev_span.shrink_to_hi(),
                    }.into_diagnostic(&self.sess.span_diagnostic));
                }
                Ok(_) => (),
                Err(err) => err.cancel(),
            }
        }
        Ok(())
    }

    /// Parses a block or unsafe block.
    pub(super) fn parse_block_expr(
        &mut self,
        opt_label: Option<Label>